Posted to commits@zeppelin.apache.org by jo...@apache.org on 2016/06/09 13:46:22 UTC

incubator-zeppelin git commit: ZEPPELIN-925 Merge HiveInterpreter into JDBCInterpreter

Repository: incubator-zeppelin
Updated Branches:
  refs/heads/master 164bcd36a -> 43baa0af4


ZEPPELIN-925 Merge HiveInterpreter into JDBCInterpreter

### What is this PR for?
This removes the hive module and adds an example configuration for using Hive through the JDBCInterpreter by loading its dependencies dynamically. It also reduces Zeppelin's binary size.

There are no code changes other than removing the hive directory and removing the module from pom.xml.
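
For reference, the example added to `docs/interpreter/jdbc.md` in this commit amounts to the following JDBC interpreter settings; this is only a sketch, and the host, user, and password values are placeholders to adjust to your HiveServer2 setup:

```
# JDBC interpreter properties for Hive (placeholder values)
hive.driver   = org.apache.hive.jdbc.HiveDriver
hive.url      = jdbc:hive2://localhost:10000
hive.user     = hive_user
hive.password = hive_password

# Dependencies to load dynamically into the interpreter setting
org.apache.hive:hive-jdbc:0.14.0
org.apache.hadoop:hadoop-common:2.6.0
```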

### What type of PR is it?
[Feature]

### Todos
* [x] - Remove hive module
* [x] - Add an example for using Hive in JDBC

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-925

### How should this be tested?
Set the JDBC interpreter properties for Hive and run a query to verify the connection.
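
For example, once the properties and dependencies above are saved and the interpreter restarted, a paragraph like the following should run against HiveServer2 (assuming the `%jdbc(prefix)` syntax described in jdbc.md's "Reference in paragraph" section, with the properties registered under the `hive.` prefix):

```sql
%jdbc(hive)
show databases;
```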

### Screenshots (if appropriate)

### Questions:
* Do the license files need an update? No
* Is there breaking changes for older versions? No
* Does this need documentation? No

Author: Jongyoul Lee <jo...@gmail.com>

Closes #943 from jongyoul/ZEPPELIN-925 and squashes the following commits:

eb50c78 [Jongyoul Lee] Fixed typo
45ec667 [Jongyoul Lee] Updated style of jdbc.md
f6b94d6 [Jongyoul Lee] Update hive.md
6aa9c7b [Jongyoul Lee] Revert "Remove hive.md from docs/interpreter"
f6573e3 [Jongyoul Lee] Deleted hive module from pom.xml
f720ed5 [Jongyoul Lee] Updated Documentation
9c58640 [Jongyoul Lee] Remove hive.md from docs/interpreter
cdf1cfc [Jongyoul Lee] Remove hive module and its directory
a38dc28 [Jongyoul Lee] Update example for hive in jdbc.md


Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/43baa0af
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/43baa0af
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/43baa0af

Branch: refs/heads/master
Commit: 43baa0af4958341d74014da25b6d10f176233879
Parents: 164bcd3
Author: Jongyoul Lee <jo...@gmail.com>
Authored: Tue Jun 7 15:43:46 2016 +0900
Committer: Jongyoul Lee <jo...@apache.org>
Committed: Thu Jun 9 22:46:10 2016 +0900

----------------------------------------------------------------------
 README.md                                       |   3 +-
 conf/zeppelin-site.xml.template                 |   2 +-
 docs/development/writingzeppelininterpreter.md  |   2 +-
 docs/index.md                                   |   2 +-
 docs/install/install.md                         |   2 +-
 docs/install/yarn_install.md                    |  12 +-
 docs/interpreter/hive.md                        |  45 +++
 docs/interpreter/jdbc.md                        |  41 ++
 docs/manual/interpreters.md                     |   2 +-
 docs/security/interpreter_authorization.md      |   2 +-
 hive/pom.xml                                    | 165 --------
 .../apache/zeppelin/hive/HiveInterpreter.java   | 391 -------------------
 .../zeppelin/hive/HiveInterpreterTest.java      | 242 ------------
 pom.xml                                         |   1 -
 14 files changed, 97 insertions(+), 815 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 0a27bf8..60c279c 100644
--- a/README.md
+++ b/README.md
@@ -162,8 +162,7 @@ enable 3rd party vendor repository (cloudera)
 ##### `-Pmapr[version]` (optional)
 
 For the MapR Hadoop Distribution, these profiles will handle the Hadoop version. As MapR allows different versions of Spark to be installed, you should specify which version of Spark is installed on the cluster by adding a Spark profile (`-Pspark-1.2`, `-Pspark-1.3`, etc.) as needed.
-For Hive, check the hive/pom.xml and adjust the version installed as well. The correct Maven
-artifacts can be found for every version of MapR at http://doc.mapr.com
+The correct Maven artifacts can be found for every version of MapR at http://doc.mapr.com
 
 Available profiles are
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/conf/zeppelin-site.xml.template
----------------------------------------------------------------------
diff --git a/conf/zeppelin-site.xml.template b/conf/zeppelin-site.xml.template
index 2d1fea1..b016180 100755
--- a/conf/zeppelin-site.xml.template
+++ b/conf/zeppelin-site.xml.template
@@ -178,7 +178,7 @@
 
 <property>
   <name>zeppelin.interpreters</name>
-  <value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.rinterpreter.RRepl,org.apache.zeppelin.rinterpreter.KnitR,org.apache.zeppelin.spark.SparkRInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter,org.apache.zeppelin.tajo.TajoInterpreter,org.apache.zeppelin.file.HDFSFileInterpreter,org.apache.zeppelin.flink.FlinkInterpreter,,org.apache.zeppelin.python.PythonInterpreter,org.apache.zeppelin.lens.LensInterpreter,org.apache.zeppelin.ignite.IgniteInterpreter,org.apache.zeppelin.ignite.IgniteSqlInterpreter,org.apache.zeppelin.cassandra.CassandraInterpreter,org.apache.zeppelin.geode.GeodeOqlInterpreter,org.apache.zeppelin.postgresql.PostgreSqlInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.kylin.KylinInterpreter,org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,org.apache.zeppelin.scalding.ScaldingInterpreter,org.apache.zeppelin.alluxio.AlluxioInterpreter,org.apache.zeppelin.hbase.HbaseInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter</value>
+  <value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.rinterpreter.RRepl,org.apache.zeppelin.rinterpreter.KnitR,org.apache.zeppelin.spark.SparkRInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.tajo.TajoInterpreter,org.apache.zeppelin.file.HDFSFileInterpreter,org.apache.zeppelin.flink.FlinkInterpreter,,org.apache.zeppelin.python.PythonInterpreter,org.apache.zeppelin.lens.LensInterpreter,org.apache.zeppelin.ignite.IgniteInterpreter,org.apache.zeppelin.ignite.IgniteSqlInterpreter,org.apache.zeppelin.cassandra.CassandraInterpreter,org.apache.zeppelin.geode.GeodeOqlInterpreter,org.apache.zeppelin.postgresql.PostgreSqlInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.kylin.KylinInterpreter,org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,org.apache.zeppelin.scalding.ScaldingInterpreter,org.apache.zeppelin.alluxio.AlluxioInterpreter,org.apache.zeppelin.hbase.HbaseInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter</value>
   <description>Comma separated interpreter configurations. First interpreter become a default</description>
 </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/development/writingzeppelininterpreter.md
----------------------------------------------------------------------
diff --git a/docs/development/writingzeppelininterpreter.md b/docs/development/writingzeppelininterpreter.md
index a024fca..5f797e8 100644
--- a/docs/development/writingzeppelininterpreter.md
+++ b/docs/development/writingzeppelininterpreter.md
@@ -199,7 +199,7 @@ Checkout some interpreters released with Zeppelin by default.
  - [spark](https://github.com/apache/incubator-zeppelin/tree/master/spark)
  - [markdown](https://github.com/apache/incubator-zeppelin/tree/master/markdown)
  - [shell](https://github.com/apache/incubator-zeppelin/tree/master/shell)
- - [hive](https://github.com/apache/incubator-zeppelin/tree/master/hive)
+ - [jdbc](https://github.com/apache/incubator-zeppelin/tree/master/jdbc)
 
 ### Contributing a new Interpreter to Zeppelin releases
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/index.md
----------------------------------------------------------------------
diff --git a/docs/index.md b/docs/index.md
index 19d9390..e6a71f0 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -41,7 +41,7 @@ limitations under the License.
 ### Multiple language backend
 
 Zeppelin interpreter concept allows any language/data-processing-backend to be plugged into Zeppelin.
-Currently Zeppelin supports many interpreters such as Scala(with Apache Spark), Python(with Apache Spark), SparkSQL, Hive, Markdown and Shell.
+Currently Zeppelin supports many interpreters such as Scala(with Apache Spark), Python(with Apache Spark), SparkSQL, JDBC, Markdown and Shell.
 
 <img class="img-responsive" src="/assets/themes/zeppelin/img/screenshots/multiple_language_backend.png" />
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/install/install.md
----------------------------------------------------------------------
diff --git a/docs/install/install.md b/docs/install/install.md
index 696f837..4dcad74 100644
--- a/docs/install/install.md
+++ b/docs/install/install.md
@@ -232,7 +232,7 @@ You can configure Zeppelin with both **environment variables** in `conf/zeppelin
     <td>ZEPPELIN_INTERPRETERS</td>
     <td>zeppelin.interpreters</td>
   <description></description>
-    <td>org.apache.zeppelin.spark.SparkInterpreter,<br />org.apache.zeppelin.spark.PySparkInterpreter,<br />org.apache.zeppelin.spark.SparkSqlInterpreter,<br />org.apache.zeppelin.spark.DepInterpreter,<br />org.apache.zeppelin.markdown.Markdown,<br />org.apache.zeppelin.shell.ShellInterpreter,<br />org.apache.zeppelin.hive.HiveInterpreter<br />
+    <td>org.apache.zeppelin.spark.SparkInterpreter,<br />org.apache.zeppelin.spark.PySparkInterpreter,<br />org.apache.zeppelin.spark.SparkSqlInterpreter,<br />org.apache.zeppelin.spark.DepInterpreter,<br />org.apache.zeppelin.markdown.Markdown,<br />org.apache.zeppelin.shell.ShellInterpreter,<br />
     ...
     </td>
     <td>Comma separated interpreter configurations [Class] <br /> The first interpreter will be a default value. <br /> It means only the first interpreter in this list can be available without <code>%interpreter_name</code> annotation in Zeppelin notebook paragraph. </td>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/install/yarn_install.md
----------------------------------------------------------------------
diff --git a/docs/install/yarn_install.md b/docs/install/yarn_install.md
index 764014b..1f01c35 100644
--- a/docs/install/yarn_install.md
+++ b/docs/install/yarn_install.md
@@ -20,7 +20,7 @@ limitations under the License.
 {% include JB/setup %}
 
 ## Introduction
-This page describes how to pre-configure a bare metal node, configure Zeppelin and connect it to existing YARN cluster running Hortonworks flavour of Hadoop. It also describes steps to configure Spark & Hive interpreter of Zeppelin.
+This page describes how to pre-configure a bare metal node, configure Zeppelin and connect it to existing YARN cluster running Hortonworks flavour of Hadoop. It also describes steps to configure Spark interpreter of Zeppelin.
 
 ## Prepare Node
 
@@ -118,16 +118,12 @@ bin/zeppelin-daemon.sh stop
 ```
 
 ## Interpreter
-Zeppelin provides various distributed processing frameworks to process data that ranges from Spark, Hive, Tajo, Ignite and Lens to name a few. This document describes to configure Hive & Spark interpreters.
+Zeppelin provides various distributed processing frameworks to process data, ranging from Spark and JDBC to Tajo, Ignite and Lens, to name a few. This document describes how to configure the JDBC & Spark interpreters.
 
 ### Hive
-Zeppelin supports Hive interpreter and hence copy hive-site.xml that should be present at /etc/hive/conf to the configuration folder of Zeppelin. Once Zeppelin is built it will have conf folder under /home/zeppelin/incubator-zeppelin.
+Zeppelin supports Hive through the JDBC interpreter. The connection information needed to use Hive can be found in your hive-site.xml.
 
-```bash
-cp /etc/hive/conf/hive-site.xml  /home/zeppelin/incubator-zeppelin/conf
-```
-
-Once Zeppelin server has started successfully, visit http://[zeppelin-server-host-name]:8080 with your web browser. Click on Interpreter tab next to Notebook dropdown. Look for Hive configurations and set them appropriately. By default hive.hiveserver2.url will be pointing to localhost and hive.hiveserver2.password/hive.hiveserver2.user are set to hive/hive. Set them as per Hive installation on YARN cluster.
+Once Zeppelin server has started successfully, visit http://[zeppelin-server-host-name]:8080 with your web browser. Click on Interpreter tab next to Notebook dropdown. Look for Hive configurations and set them appropriately. Set them as per Hive installation on YARN cluster.
 Click on Save button. Once these configurations are updated, Zeppelin will prompt you to restart the interpreter. Accept the prompt and the interpreter will reload the configurations.
 
 ### Spark

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/interpreter/hive.md
----------------------------------------------------------------------
diff --git a/docs/interpreter/hive.md b/docs/interpreter/hive.md
index 99d483b..2fc365c 100644
--- a/docs/interpreter/hive.md
+++ b/docs/interpreter/hive.md
@@ -9,6 +9,51 @@ group: manual
 ## Hive Interpreter for Apache Zeppelin
 The [Apache Hive](https://hive.apache.org/) ™ data warehouse software facilitates querying and managing large datasets residing in distributed storage. Hive provides a mechanism to project structure onto this data and query the data using a SQL-like language called HiveQL. At the same time this language also allows traditional map/reduce programmers to plug in their custom mappers and reducers when it is inconvenient or inefficient to express this logic in HiveQL.
 
+## Important Notice
+The Hive Interpreter will be deprecated and merged into the JDBC Interpreter. You can get the same functionality by using the JDBC Interpreter instead. See the example settings and dependencies below.
+
+### Properties
+<table class="table-configuration">
+  <tr>
+    <th>Property</th>
+    <th>Value</th>
+  </tr>
+  <tr>
+    <td>hive.driver</td>
+    <td>org.apache.hive.jdbc.HiveDriver</td>
+  </tr>
+  <tr>
+    <td>hive.url</td>
+    <td>jdbc:hive2://localhost:10000</td>
+  </tr>
+  <tr>
+    <td>hive.user</td>
+    <td>hiveUser</td>
+  </tr>
+  <tr>
+    <td>hive.password</td>
+    <td>hivePassword</td>
+  </tr>
+</table>
+
+### Dependencies
+<table class="table-configuration">
+  <tr>
+    <th>Artifact</th>
+    <th>Exclude</th>
+  </tr>
+  <tr>
+    <td>org.apache.hive:hive-jdbc:0.14.0</td>
+    <td></td>
+  </tr>
+  <tr>
+    <td>org.apache.hadoop:hadoop-common:2.6.0</td>
+    <td></td>
+  </tr>
+</table>
+
+----
+
 ### Configuration
 <table class="table-configuration">
   <tr>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/interpreter/jdbc.md
----------------------------------------------------------------------
diff --git a/docs/interpreter/jdbc.md b/docs/interpreter/jdbc.md
index f12c673..72cce3d 100644
--- a/docs/interpreter/jdbc.md
+++ b/docs/interpreter/jdbc.md
@@ -195,6 +195,47 @@ To develop this functionality use this [method](http://docs.oracle.com/javase/7/
   </tr>
 </table>
 
+### Examples
+#### Hive
+##### Properties
+ <table class="table-configuration">
+   <tr>
+     <th>Name</th>
+     <th>Value</th>
+   </tr>
+   <tr>
+     <td>hive.driver</td>
+     <td>org.apache.hive.jdbc.HiveDriver</td>
+   </tr>
+   <tr>
+     <td>hive.url</td>
+     <td>jdbc:hive2://localhost:10000</td>
+   </tr>
+   <tr>
+     <td>hive.user</td>
+     <td>hive_user</td>
+   </tr>
+   <tr>
+     <td>hive.password</td>
+     <td>hive_password</td>
+   </tr>
+ </table>
+##### Dependencies
+ <table class="table-configuration">
+   <tr>
+     <th>Artifact</th>
+     <th>Excludes</th>
+   </tr>
+   <tr>
+     <td>org.apache.hive:hive-jdbc:0.14.0</td>
+     <td></td>
+   </tr>
+   <tr>
+     <td>org.apache.hadoop:hadoop-common:2.6.0</td>
+     <td></td>
+   </tr>
+ </table>
+
 ### How to use
 
 #### Reference in paragraph

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/manual/interpreters.md
----------------------------------------------------------------------
diff --git a/docs/manual/interpreters.md b/docs/manual/interpreters.md
index 87a4122..ca73bef 100644
--- a/docs/manual/interpreters.md
+++ b/docs/manual/interpreters.md
@@ -22,7 +22,7 @@ limitations under the License.
 ## Interpreters in Zeppelin
 In this section, we will explain about the role of interpreters, interpreters group and interpreter settings in Zeppelin.
 The concept of Zeppelin interpreter allows any language/data-processing-backend to be plugged into Zeppelin.
-Currently, Zeppelin supports many interpreters such as Scala ( with Apache Spark ), Python ( with Apache Spark ), SparkSQL, Hive, Markdown, Shell and so on.
+Currently, Zeppelin supports many interpreters such as Scala ( with Apache Spark ), Python ( with Apache Spark ), SparkSQL, JDBC, Markdown, Shell and so on.
 
 ## What is Zeppelin interpreter?
 Zeppelin Interpreter is a plug-in which enables Zeppelin users to use a specific language/data-processing-backend. For example, to use Scala code in Zeppelin, you need `%spark` interpreter.

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/docs/security/interpreter_authorization.md
----------------------------------------------------------------------
diff --git a/docs/security/interpreter_authorization.md b/docs/security/interpreter_authorization.md
index 862ef9b..d6c48b2 100644
--- a/docs/security/interpreter_authorization.md
+++ b/docs/security/interpreter_authorization.md
@@ -27,7 +27,7 @@ Interpreter authorization involves permissions like creating an interpreter and
 
 Data source authorization involves authenticating to the data source like a Mysql database and letting it determine user permissions.
 
-For the Hive interpreter, we need to maintain per-user connection pools.
+For the JDBC interpreter, we need to maintain per-user connection pools.
 The interpret method takes the user string as parameter and executes the jdbc call using a connection in the user's connection pool.
 
 In case of Presto, we don't need password if the Presto DB server runs backend code using HDFS authorization for the user.

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/hive/pom.xml
----------------------------------------------------------------------
diff --git a/hive/pom.xml b/hive/pom.xml
deleted file mode 100644
index 057a335..0000000
--- a/hive/pom.xml
+++ /dev/null
@@ -1,165 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <artifactId>zeppelin</artifactId>
-    <groupId>org.apache.zeppelin</groupId>
-    <version>0.6.0-SNAPSHOT</version>
-    <relativePath>..</relativePath>
-  </parent>
-
-  <groupId>org.apache.zeppelin</groupId>
-  <artifactId>zeppelin-hive</artifactId>
-  <packaging>jar</packaging>
-  <version>0.6.0-SNAPSHOT</version>
-  <name>Zeppelin: Hive interpreter</name>
-  <url>http://www.apache.org</url>
-<!--
-  <repositories>
-   <repository>
-       <id>mapr-releases</id>
-       <url>http://repository.mapr.com/maven/</url>
-       <snapshots><enabled>false</enabled></snapshots>
-       <releases><enabled>true</enabled></releases>
-     </repository>
-  </repositories>
--->
-  <properties>
-    <hive.hive.version>0.14.0</hive.hive.version>
-    <hive.hadoop.version>2.6.0</hive.hadoop.version>
-<!--<hive.hive.version>1.0.0-mapr-1504</hive.hive.version>
-    <hive.hadoop.version>2.7.0-mapr-1506</hive.hadoop.version> -->
-  </properties>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.zeppelin</groupId>
-      <artifactId>zeppelin-interpreter</artifactId>
-      <version>${project.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-exec</artifactId>
-      <version>1.3</version>
-    </dependency>
-	
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-    </dependency>
-    
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-jdbc</artifactId>
-      <version>${hive.hive.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${hive.hadoop.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.h2database</groupId>
-      <artifactId>h2</artifactId>
-      <version>1.4.190</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-deploy-plugin</artifactId>
-        <version>2.7</version>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-
-      <plugin>
-        <artifactId>maven-enforcer-plugin</artifactId>
-        <version>1.3.1</version>            
-        <executions> 
-          <execution> 
-            <id>enforce</id> 
-            <phase>none</phase> 
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <version>2.8</version>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/../../interpreter/hive</outputDirectory>
-              <overWriteReleases>false</overWriteReleases>
-              <overWriteSnapshots>false</overWriteSnapshots>
-              <overWriteIfNewer>true</overWriteIfNewer>
-              <includeScope>runtime</includeScope>
-            </configuration>
-          </execution>
-          <execution>
-            <id>copy-artifact</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/../../interpreter/hive</outputDirectory>
-              <overWriteReleases>false</overWriteReleases>
-              <overWriteSnapshots>false</overWriteSnapshots>
-              <overWriteIfNewer>true</overWriteIfNewer>
-              <includeScope>runtime</includeScope>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>${project.groupId}</groupId>
-                  <artifactId>${project.artifactId}</artifactId>
-                  <version>${project.version}</version>
-                  <type>${project.packaging}</type>
-                </artifactItem>
-              </artifactItems>              
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
----------------------------------------------------------------------
diff --git a/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java b/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
deleted file mode 100644
index 42916b4..0000000
--- a/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
+++ /dev/null
@@ -1,391 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.hive;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import org.apache.zeppelin.interpreter.Interpreter;
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.apache.zeppelin.interpreter.InterpreterResult.Code;
-import org.apache.zeppelin.scheduler.Scheduler;
-import org.apache.zeppelin.scheduler.SchedulerFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.commons.lang.StringUtils.containsIgnoreCase;
-
-/**
- * Hive interpreter for Zeppelin.
- */
-public class HiveInterpreter extends Interpreter {
-  Logger logger = LoggerFactory.getLogger(HiveInterpreter.class);
-
-  static final String COMMON_KEY = "common";
-  static final String MAX_LINE_KEY = "max_count";
-  static final String MAX_LINE_DEFAULT = "1000";
-
-  static final String DEFAULT_KEY = "default";
-  static final String DRIVER_KEY = "driver";
-  static final String URL_KEY = "url";
-  static final String USER_KEY = "user";
-  static final String PASSWORD_KEY = "password";
-  static final String DOT = ".";
-
-  static final char TAB = '\t';
-  static final char NEWLINE = '\n';
-  static final String EXPLAIN_PREDICATE = "EXPLAIN ";
-  static final String TABLE_MAGIC_TAG = "%table ";
-  static final String UPDATE_COUNT_HEADER = "Update Count";
-
-  static final String COMMON_MAX_LINE = COMMON_KEY + DOT + MAX_LINE_KEY;
-
-  static final String DEFAULT_DRIVER = DEFAULT_KEY + DOT + DRIVER_KEY;
-  static final String DEFAULT_URL = DEFAULT_KEY + DOT + URL_KEY;
-  static final String DEFAULT_USER = DEFAULT_KEY + DOT + USER_KEY;
-  static final String DEFAULT_PASSWORD = DEFAULT_KEY + DOT + PASSWORD_KEY;
-
-  private final HashMap<String, Properties> propertiesMap;
-  private final Map<String, Statement> paragraphIdStatementMap;
-
-  private final Map<String, ArrayList<Connection>> propertyKeyUnusedConnectionListMap;
-  private final Map<String, Connection> paragraphIdConnectionMap;
-
-  static {
-    Interpreter.register(
-        "hql",
-        "hive",
-        HiveInterpreter.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add(COMMON_MAX_LINE, MAX_LINE_DEFAULT, "Maximum line of results")
-            .add(DEFAULT_DRIVER, "org.apache.hive.jdbc.HiveDriver", "Hive JDBC driver")
-            .add(DEFAULT_URL, "jdbc:hive2://localhost:10000", "The URL for HiveServer2.")
-            .add(DEFAULT_USER, "hive", "The hive user")
-            .add(DEFAULT_PASSWORD, "", "The password for the hive user").build());
-  }
-
-  public HiveInterpreter(Properties property) {
-    super(property);
-    propertiesMap = new HashMap<>();
-    propertyKeyUnusedConnectionListMap = new HashMap<>();
-    paragraphIdStatementMap = new HashMap<>();
-    paragraphIdConnectionMap = new HashMap<>();
-  }
-
-  public HashMap<String, Properties> getPropertiesMap() {
-    return propertiesMap;
-  }
-
-  @Override
-  public void open() {
-    logger.debug("property: {}", property);
-
-    for (String propertyKey : property.stringPropertyNames()) {
-      logger.debug("propertyKey: {}", propertyKey);
-      String[] keyValue = propertyKey.split("\\.", 2);
-      if (2 == keyValue.length) {
-        logger.debug("key: {}, value: {}", keyValue[0], keyValue[1]);
-        Properties prefixProperties;
-        if (propertiesMap.containsKey(keyValue[0])) {
-          prefixProperties = propertiesMap.get(keyValue[0]);
-        } else {
-          prefixProperties = new Properties();
-          propertiesMap.put(keyValue[0], prefixProperties);
-        }
-        prefixProperties.put(keyValue[1], property.getProperty(propertyKey));
-      }
-    }
-
-    Set<String> removeKeySet = new HashSet<>();
-    for (String key : propertiesMap.keySet()) {
-      if (!COMMON_KEY.equals(key)) {
-        Properties properties = propertiesMap.get(key);
-        if (!properties.containsKey(DRIVER_KEY) || !properties.containsKey(URL_KEY)) {
-          logger.error("{} will be ignored. {}.{} and {}.{} is mandatory.",
-              key, DRIVER_KEY, key, key, URL_KEY);
-          removeKeySet.add(key);
-        }
-      }
-    }
-
-    for (String key : removeKeySet) {
-      propertiesMap.remove(key);
-    }
-
-    logger.debug("propertiesMap: {}", propertiesMap);
-  }
-
-  @Override
-  public void close() {
-    try {
-      for (List<Connection> connectionList : propertyKeyUnusedConnectionListMap.values()) {
-        for (Connection c : connectionList) {
-          c.close();
-        }
-      }
-
-      for (Statement statement : paragraphIdStatementMap.values()) {
-        statement.close();
-      }
-      paragraphIdStatementMap.clear();
-
-      for (Connection connection : paragraphIdConnectionMap.values()) {
-        connection.close();
-      }
-      paragraphIdConnectionMap.clear();
-
-    } catch (SQLException e) {
-      logger.error("Error while closing...", e);
-    }
-  }
-
-  public Connection getConnection(String propertyKey) throws ClassNotFoundException, SQLException {
-    Connection connection = null;
-    if (propertyKey == null || propertiesMap.get(propertyKey) == null) {
-      return null;
-    }
-    if (propertyKeyUnusedConnectionListMap.containsKey(propertyKey)) {
-      ArrayList<Connection> connectionList = propertyKeyUnusedConnectionListMap.get(propertyKey);
-      if (0 != connectionList.size()) {
-        connection = propertyKeyUnusedConnectionListMap.get(propertyKey).remove(0);
-        if (null != connection && connection.isClosed()) {
-          connection.close();
-          connection = null;
-        }
-      }
-    }
-    if (null == connection) {
-      Properties properties = propertiesMap.get(propertyKey);
-      Class.forName(properties.getProperty(DRIVER_KEY));
-      String url = properties.getProperty(URL_KEY);
-      String user = properties.getProperty(USER_KEY);
-      String password = properties.getProperty(PASSWORD_KEY);
-      if (null != user && null != password) {
-        connection = DriverManager.getConnection(url, user, password);
-      } else {
-        connection = DriverManager.getConnection(url, properties);
-      }
-    }
-    return connection;
-  }
-
-  public Statement getStatement(String propertyKey, String paragraphId)
-      throws SQLException, ClassNotFoundException {
-    Connection connection;
-    if (paragraphIdConnectionMap.containsKey(paragraphId)) {
-      // Never enter for now.
-      connection = paragraphIdConnectionMap.get(paragraphId);
-    } else {
-      connection = getConnection(propertyKey);
-    }
-    
-    if (connection == null) {
-      return null;
-    }
-
-    Statement statement = connection.createStatement();
-    if (isStatementClosed(statement)) {
-      connection = getConnection(propertyKey);
-      statement = connection.createStatement();
-    }
-    paragraphIdConnectionMap.put(paragraphId, connection);
-    paragraphIdStatementMap.put(paragraphId, statement);
-
-    return statement;
-  }
-
-  private boolean isStatementClosed(Statement statement) {
-    try {
-      return statement.isClosed();
-    } catch (Throwable t) {
-      logger.debug("{} doesn't support isClosed method", statement);
-      return false;
-    }
-  }
-
-  public InterpreterResult executeSql(String propertyKey, String sql,
-                                      InterpreterContext interpreterContext) {
-    String paragraphId = interpreterContext.getParagraphId();
-
-    try {
-
-      Statement statement = getStatement(propertyKey, paragraphId);
-
-      if (statement == null) {
-        return new InterpreterResult(Code.ERROR, "Prefix not found.");
-      }
-
-      statement.setMaxRows(getMaxResult());
-
-      StringBuilder msg;
-
-      if (containsIgnoreCase(sql, EXPLAIN_PREDICATE)) {
-        msg = new StringBuilder();
-      } else {
-        msg = new StringBuilder(TABLE_MAGIC_TAG);
-      }
-
-      ResultSet resultSet = null;
-
-      try {
-        boolean isResultSetAvailable = statement.execute(sql);
-
-        if (isResultSetAvailable) {
-          resultSet = statement.getResultSet();
-
-          ResultSetMetaData md = resultSet.getMetaData();
-
-          for (int i = 1; i < md.getColumnCount() + 1; i++) {
-            if (i > 1) {
-              msg.append(TAB);
-            }
-            msg.append(md.getColumnName(i));
-          }
-          msg.append(NEWLINE);
-
-          int displayRowCount = 0;
-          while (resultSet.next() && displayRowCount < getMaxResult()) {
-            for (int i = 1; i < md.getColumnCount() + 1; i++) {
-              msg.append(resultSet.getString(i));
-              if (i != md.getColumnCount()) {
-                msg.append(TAB);
-              }
-            }
-            msg.append(NEWLINE);
-            displayRowCount++;
-          }
-        } else {
-          // Response contains either an update count or there are no results.
-          int updateCount = statement.getUpdateCount();
-          msg.append(UPDATE_COUNT_HEADER).append(NEWLINE);
-          msg.append(updateCount).append(NEWLINE);
-        }
-      } finally {
-        try {
-          if (resultSet != null) {
-            resultSet.close();
-          }
-          statement.close();
-        } finally {
-          moveConnectionToUnused(propertyKey, paragraphId);
-        }
-      }
-
-      return new InterpreterResult(Code.SUCCESS, msg.toString());
-
-    } catch (SQLException | ClassNotFoundException ex) {
-      logger.error("Cannot run " + sql, ex);
-      return new InterpreterResult(Code.ERROR, ex.getMessage());
-    }
-  }
-
-  private void moveConnectionToUnused(String propertyKey, String paragraphId) {
-    if (paragraphIdConnectionMap.containsKey(paragraphId)) {
-      Connection connection = paragraphIdConnectionMap.remove(paragraphId);
-      if (null != connection) {
-        if (propertyKeyUnusedConnectionListMap.containsKey(propertyKey)) {
-          propertyKeyUnusedConnectionListMap.get(propertyKey).add(connection);
-        } else {
-          ArrayList<Connection> connectionList = new ArrayList<>();
-          connectionList.add(connection);
-          propertyKeyUnusedConnectionListMap.put(propertyKey, connectionList);
-        }
-      }
-    }
-  }
-
-  @Override
-  public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
-    String propertyKey = getPropertyKey(cmd);
-
-    if (null != propertyKey && !propertyKey.equals(DEFAULT_KEY)) {
-      cmd = cmd.substring(propertyKey.length() + 2);
-    }
-
-    cmd = cmd.trim();
-
-    logger.info("PropertyKey: {}, SQL command: '{}'", propertyKey, cmd);
-
-    return executeSql(propertyKey, cmd, contextInterpreter);
-  }
-
-  private int getMaxResult() {
-    return Integer.valueOf(
-        propertiesMap.get(COMMON_KEY).getProperty(MAX_LINE_KEY, MAX_LINE_DEFAULT));
-  }
-
-  public String getPropertyKey(String cmd) {
-    boolean firstLineIndex = cmd.startsWith("(");
-
-    if (firstLineIndex) {
-      int configStartIndex = cmd.indexOf("(");
-      int configLastIndex = cmd.indexOf(")");
-      if (configStartIndex != -1 && configLastIndex != -1) {
-        return cmd.substring(configStartIndex + 1, configLastIndex);
-      } else {
-        return null;
-      }
-    } else {
-      return DEFAULT_KEY;
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    String paragraphId = context.getParagraphId();
-    try {
-      paragraphIdStatementMap.get(paragraphId).cancel();
-    } catch (SQLException e) {
-      logger.error("Error while cancelling...", e);
-    }
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.SIMPLE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return SchedulerFactory.singleton().createOrGetParallelScheduler(
-        HiveInterpreter.class.getName() + this.hashCode(), 10);
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
----------------------------------------------------------------------
diff --git a/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java b/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
deleted file mode 100644
index cbec104..0000000
--- a/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.zeppelin.hive;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Properties;
-
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.junit.After;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-import static java.lang.String.format;
-
-/**
- * Hive interpreter unit tests
- */
-public class HiveInterpreterTest {
-  static String jdbcConnection;
-
-  private static String getJdbcConnection() throws IOException {
-    if(null == jdbcConnection) {
-      Path tmpDir = Files.createTempDirectory("h2-test-");
-      tmpDir.toFile().deleteOnExit();
-      jdbcConnection = format("jdbc:h2:%s", tmpDir);
-    }
-    return jdbcConnection;
-  }
-  @BeforeClass
-  public static void setUp() throws Exception {
-
-    Class.forName("org.h2.Driver");
-    Connection connection = DriverManager.getConnection(getJdbcConnection());
-    Statement statement = connection.createStatement();
-    statement.execute(
-        "DROP TABLE IF EXISTS test_table; " +
-        "CREATE TABLE test_table(id varchar(255), name varchar(255));");
-    statement.execute(
-        "insert into test_table(id, name) values ('a', 'a_name'),('b', 'b_name');"
-    );
-  }
-
-  @After
-  public void tearDown() throws Exception {
-  }
-  
-  @Test
-  public void testForParsePropertyKey() throws IOException {
-    HiveInterpreter t = new HiveInterpreter(new Properties());
-    
-    assertEquals(t.getPropertyKey("(fake) select max(cant) from test_table where id >= 2452640"),
-        "fake");
-    
-    assertEquals(t.getPropertyKey("() select max(cant) from test_table where id >= 2452640"),
-        "");
-    
-    assertEquals(t.getPropertyKey(")fake( select max(cant) from test_table where id >= 2452640"),
-        "default");
-        
-    // when you use a %hive(prefix1), prefix1 is the propertyKey as form part of the cmd string
-    assertEquals(t.getPropertyKey("(prefix1)\n select max(cant) from test_table where id >= 2452640"),
-        "prefix1");
-    
-    assertEquals(t.getPropertyKey("(prefix2) select max(cant) from test_table where id >= 2452640"),
-            "prefix2");
-    
-    // when you use a %hive, prefix is the default
-    assertEquals(t.getPropertyKey("select max(cant) from test_table where id >= 2452640"),
-            "default");
-  }
-  
-  @Test
-  public void testForMapPrefix() throws SQLException, IOException {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "org.h2.Driver");
-    properties.setProperty("default.url", getJdbcConnection());
-    properties.setProperty("default.user", "");
-    properties.setProperty("default.password", "");
-    HiveInterpreter t = new HiveInterpreter(properties);
-    t.open();
-
-    String sqlQuery = "(fake) select * from test_table";
-
-    InterpreterResult interpreterResult = t.interpret(sqlQuery, new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
-
-    // if prefix not found return ERROR and Prefix not found.
-    assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code());
-    assertEquals("Prefix not found.", interpreterResult.message());
-  }
-  
-  @Test
-  public void readTest() throws IOException {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "org.h2.Driver");
-    properties.setProperty("default.url", getJdbcConnection());
-    properties.setProperty("default.user", "");
-    properties.setProperty("default.password", "");
-    HiveInterpreter t = new HiveInterpreter(properties);
-    t.open();
-
-    assertTrue(t.interpret("show databases", new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null)).message().contains("SCHEMA_NAME"));
-    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n",
-        t.interpret("select * from test_table", new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null)).message());
-  }
-
-  @Test
-  public void readTestWithConfiguration() throws IOException {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "wrong.Driver");
-    properties.setProperty("default.url", getJdbcConnection());
-    properties.setProperty("default.user", "");
-    properties.setProperty("default.password", "");
-    properties.setProperty("h2.driver", "org.h2.Driver");
-    properties.setProperty("h2.url", getJdbcConnection());
-    properties.setProperty("h2.user", "");
-    properties.setProperty("h2.password", "");
-    HiveInterpreter t = new HiveInterpreter(properties);
-    t.open();
-
-    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n",
-        t.interpret("(h2)\n select * from test_table", new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null)).message());
-  }
-
-  @Test
-  public void jdbcRestart() throws IOException, SQLException, ClassNotFoundException {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "org.h2.Driver");
-    properties.setProperty("default.url", getJdbcConnection());
-    properties.setProperty("default.user", "");
-    properties.setProperty("default.password", "");
-    HiveInterpreter t = new HiveInterpreter(properties);
-    t.open();
-
-    InterpreterResult interpreterResult =
-        t.interpret("select * from test_table", new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
-    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n", interpreterResult.message());
-
-    t.getConnection("default").close();
-
-    interpreterResult =
-        t.interpret("select * from test_table", new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
-    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n", interpreterResult.message());
-  }
-
-  @Test
-  public void test() throws IOException {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "org.h2.Driver");
-    properties.setProperty("default.url", getJdbcConnection());
-    properties.setProperty("default.user", "");
-    properties.setProperty("default.password", "");
-    HiveInterpreter t = new HiveInterpreter(properties);
-    t.open();
-
-    InterpreterContext interpreterContext = new InterpreterContext(null, "a", null, null, null, null, null, null, null, null, null);
-
-    //simple select test
-    InterpreterResult result = t.interpret("select * from test_table", interpreterContext);
-    assertEquals(result.type(), InterpreterResult.Type.TABLE);
-
-    //explain test
-    result = t.interpret("explain select * from test_table", interpreterContext);
-    assertEquals(result.type(), InterpreterResult.Type.TEXT);
-    t.close();
-  }
-
-  @Test
-  public void parseMultiplePropertiesMap() {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "defaultDriver");
-    properties.setProperty("default.url", "defaultUri");
-    properties.setProperty("default.user", "defaultUser");
-    HiveInterpreter hi = new HiveInterpreter(properties);
-    hi.open();
-    assertNotNull("propertiesMap is not null", hi.getPropertiesMap());
-    assertNotNull("propertiesMap.get(default) is not null", hi.getPropertiesMap().get("default"));
-    assertTrue("default exists", "defaultDriver".equals(hi.getPropertiesMap().get("default").getProperty("driver")));
-    hi.close();
-  }
-
-  @Test
-  public void ignoreInvalidSettings() {
-    Properties properties = new Properties();
-    properties.setProperty("common.max_count", "1000");
-    properties.setProperty("common.max_retry", "3");
-    properties.setProperty("default.driver", "defaultDriver");
-    properties.setProperty("default.url", "defaultUri");
-    properties.setProperty("default.user", "defaultUser");
-    properties.setProperty("presto.driver", "com.facebook.presto.jdbc.PrestoDriver");
-    HiveInterpreter hi = new HiveInterpreter(properties);
-    hi.open();
-    assertTrue("default exists", hi.getPropertiesMap().containsKey("default"));
-    assertFalse("presto doesn't exists", hi.getPropertiesMap().containsKey("presto"));
-    hi.close();
-  }
-
-  @Test
-  public void getPropertyKey() {
-    HiveInterpreter hi = new HiveInterpreter(new Properties());
-    hi.open();
-    String testCommand = "(default)\nshow tables";
-    assertEquals("get key of default", "default", hi.getPropertyKey(testCommand));
-    testCommand = "(default) show tables";
-    assertEquals("get key of default", "default", hi.getPropertyKey(testCommand));
-    hi.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/43baa0af/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 970f6cb..2451dec 100755
--- a/pom.xml
+++ b/pom.xml
@@ -72,7 +72,6 @@
     <module>angular</module>
     <module>shell</module>
     <module>livy</module>
-    <module>hive</module>
     <module>hbase</module>
     <module>phoenix</module>
     <module>postgresql</module>