Posted to commits@lens.apache.org by am...@apache.org on 2016/05/09 11:15:51 UTC

[3/3] lens git commit: LENS-518 : Move Hive dependency to Apache Hive master

LENS-518 : Move Hive dependency to Apache Hive master


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d6b12169
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d6b12169
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d6b12169

Branch: refs/heads/master
Commit: d6b1216922f1438a7fc000dfcdd9121d87c65149
Parents: d264c9a
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon May 9 16:45:32 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon May 9 16:45:32 2016 +0530

----------------------------------------------------------------------
 lens-cli/pom.xml                                |  31 +++
 .../apache/lens/cli/TestLensQueryCommands.java  |  28 ++-
 lens-client/pom.xml                             |  21 ++
 lens-cube/pom.xml                               |  27 ++-
 .../lens/cube/metadata/CubeMetastoreClient.java |  11 +-
 .../org/apache/lens/cube/metadata/Storage.java  |   4 +-
 .../lens/cube/parse/CubeQueryContext.java       |   7 +-
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |   7 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |  35 ++--
 .../cube/metadata/TestCubeMetastoreClient.java  |  19 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  34 ++--
 .../apache/lens/cube/parse/TestHQLParser.java   |  56 +++---
 .../org/apache/lens/cube/parse/TestQuery.java   |   9 +-
 ...stFactPartitionBasedQueryCostCalculator.java |   2 +-
 lens-cube/src/test/resources/hive-site.xml      |  10 +
 lens-driver-es/pom.xml                          | 129 +++++++-----
 .../client/jest/JestResultSetTransformer.java   |   2 +-
 .../org/apache/lens/driver/es/ESDriverTest.java |   5 +
 .../org/apache/lens/driver/es/MockClientES.java |   2 +-
 .../driver/es/ResultSetTransformationTest.java  |   2 +-
 lens-driver-es/src/test/resources/hive-site.xml |  53 +++++
 lens-driver-es/src/test/resources/logback.xml   |  36 ++++
 lens-driver-hive/pom.xml                        |  17 ++
 .../driver/hive/EmbeddedThriftConnection.java   | 200 ++++++++++++++++++-
 .../org/apache/lens/driver/hive/HiveDriver.java |  62 +++---
 .../lens/driver/hive/HiveInMemoryResultSet.java |   2 +-
 .../apache/lens/driver/hive/HiveQueryPlan.java  |  63 ++++--
 .../driver/hive/RemoteThriftConnection.java     |   7 +-
 .../apache/lens/driver/hive/TestHiveDriver.java | 185 +++++++++--------
 .../lens/driver/hive/TestRemoteHiveDriver.java  |  79 ++++----
 .../src/test/resources/hive-site.xml            |  10 +
 lens-driver-jdbc/pom.xml                        |  24 +++
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |   2 +-
 .../apache/lens/driver/jdbc/JDBCResultSet.java  |   6 +-
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  77 +------
 .../src/test/resources/hive-site.xml            |  15 ++
 lens-examples/pom.xml                           |   1 -
 lens-ml-lib/pom.xml                             |   4 +
 .../java/org/apache/lens/ml/TestMLResource.java |   5 -
 .../java/org/apache/lens/ml/TestMLRunner.java   |   5 -
 lens-query-lib/pom.xml                          |   8 +
 .../lens/lib/query/FilePersistentFormatter.java |   9 +-
 .../lib/query/MockLensResultSetMetadata.java    |  41 ++--
 lens-server-api/pom.xml                         |  12 ++
 .../api/driver/LensResultSetMetadata.java       |   4 +-
 .../save/param/ParameterDataTypeEncoder.java    |   2 +-
 .../MappedDiagnosticLogSegregationContext.java  |   7 +
 lens-server/pom.xml                             |  28 +++
 .../org/apache/lens/server/BaseLensService.java |  25 +--
 .../org/apache/lens/server/LensServices.java    |   2 +-
 .../metastore/CubeMetastoreServiceImpl.java     |   6 +-
 .../apache/lens/server/metastore/JAXBUtils.java |   4 -
 .../server/query/QueryExecutionServiceImpl.java |   1 +
 .../apache/lens/server/rewrite/RewriteUtil.java |  61 +++---
 .../lens/server/session/HiveSessionService.java |  27 +--
 .../lens/server/session/LensSessionImpl.java    |  75 ++++---
 .../log/StatisticsLogPartitionHandler.java      |   2 +-
 .../apache/lens/server/TestServerRestart.java   |   2 +-
 .../lens/server/common/RestAPITestUtil.java     |   2 +-
 .../server/metastore/TestMetastoreService.java  |   4 +-
 .../lens/server/query/TestQueryService.java     |   8 +-
 .../lens/server/query/TestResultFormatting.java |   2 +-
 .../lens/server/rewrite/TestRewriting.java      | 198 ++++++------------
 .../server/session/TestSessionClassLoaders.java |   2 +-
 .../lens/server/session/TestSessionExpiry.java  |   4 +-
 lens-server/src/test/resources/hive-site.xml    |  15 ++
 lens-storage-db/pom.xml                         |   8 +
 .../src/test/resources/hive-site.xml            |   9 +
 pom.xml                                         |  93 ++++++++-
 src/site/apt/admin/deployment.apt               |   8 +-
 src/site/apt/developer/contribute.apt           |   2 +-
 src/site/apt/lenshome/install-and-run.apt       |   3 +-
 tools/conf/server/lens-site.xml                 |   5 +
 73 files changed, 1268 insertions(+), 705 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cli/pom.xml
----------------------------------------------------------------------
diff --git a/lens-cli/pom.xml b/lens-cli/pom.xml
index bfa6f06..6f0555c 100644
--- a/lens-cli/pom.xml
+++ b/lens-cli/pom.xml
@@ -65,6 +65,16 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.testng</groupId>
       <artifactId>testng</artifactId>
     </dependency>
@@ -122,6 +132,27 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
index 1dd2ffe..4d8341a 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
@@ -60,15 +60,6 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class TestLensQueryCommands extends LensCliApplicationTest {
 
-  /** The explain plan. */
-  private static String explainPlan = "TOK_QUERY\n" + "   TOK_FROM\n" + "      TOK_TABREF\n" + "         TOK_TABNAME\n"
-    + "            local_dim_table\n" + "         test_dim\n" + "   TOK_INSERT\n" + "      TOK_DESTINATION\n"
-    + "         TOK_DIR\n" + "            TOK_TMP_FILE\n" + "      TOK_SELECT\n" + "         TOK_SELEXPR\n"
-    + "            .\n" + "               TOK_TABLE_OR_COL\n" + "                  test_dim\n"
-    + "               id\n" + "         TOK_SELEXPR\n" + "            .\n" + "               TOK_TABLE_OR_COL\n"
-    + "                  test_dim\n" + "               name\n" + "      TOK_WHERE\n" + "         =\n"
-    + "            .\n" + "               TOK_TABLE_OR_COL\n" + "                  test_dim\n"
-    + "               dt\n" + "            'latest'";
   private File resDir;
 
   @BeforeClass
@@ -241,7 +232,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     assertEquals(result, "No prepared queries");
 
     final String qh2 = qCom.explainAndPrepare(sql, "testPrepQuery3");
-    assertTrue(qh2.contains(explainPlan));
+    assertExplainOutput(qh2);
     String handles = qCom.getAllPreparedQueries("testPrepQuery3", "all", -1, Long.MAX_VALUE);
     assertFalse(handles.contains("No prepared queries"), handles);
 
@@ -282,7 +273,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String result = qCom.explainQuery(sql, null);
 
     log.debug(result);
-    assertTrue(result.contains(explainPlan));
+    assertExplainOutput(result);
 
     closeClientConnection(qCom);
   }
@@ -406,10 +397,8 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
   }
 
   /**
-   * Sets the up.
-   *
-   * @param client the new up
-   * @throws Exception the exception
+   * Sets up query command instances and adds partitions to a table.
+   * @throws Exception
    */
   @BeforeClass
   public void setup() throws Exception {
@@ -577,4 +566,13 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
 
     client.closeConnection();
   }
+
+  private void assertExplainOutput(String result) {
+    assertTrue(result.contains("Stage-0 is a root stage"));
+    assertTrue(result.contains("Partition Description"));
+    assertTrue(result.contains("dt latest"));
+    assertTrue(result.contains("Processor Tree"));
+    assertTrue(result.contains("TableScan"));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-client/pom.xml
----------------------------------------------------------------------
diff --git a/lens-client/pom.xml b/lens-client/pom.xml
index 2032cf5..d8f7d6c 100644
--- a/lens-client/pom.xml
+++ b/lens-client/pom.xml
@@ -63,6 +63,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-service</artifactId>
       <scope>test</scope>
     </dependency>
@@ -147,6 +152,22 @@
       <artifactId>jcl-over-slf4j</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
+    <dependency>
       <groupId>net.sf.opencsv</groupId>
       <artifactId>opencsv</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/pom.xml
----------------------------------------------------------------------
diff --git a/lens-cube/pom.xml b/lens-cube/pom.xml
index 0533f44..ef432c2 100644
--- a/lens-cube/pom.xml
+++ b/lens-cube/pom.xml
@@ -46,9 +46,17 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-service</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
     </dependency>
@@ -90,6 +98,23 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
+
   </dependencies>
 
   <build>
@@ -98,7 +123,7 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
-          <argLine>-Xms256m -Xmx512m -XX:PermSize=256m -XX:MaxPermSize=256m</argLine>
+          <argLine>-Xms256m -Xmx512m -XX:PermSize=256m -XX:MaxPermSize=256m -Xss4m</argLine>
         </configuration>
       </plugin>
     </plugins>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 42a8eb2..c8190bc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -225,10 +225,7 @@ public class CubeMetastoreClient {
               partition.getTPartition().getSd().getSerdeInfo().getParameters());
             latestPart.setLocation(partition.getLocation());
             latestPart.setInputFormatClass(partition.getInputFormatClass());
-            latestPart.setOutputFormatClass(partition.getOutputFormatClass());
-            // the following is a fix because hive has a bug: https://issues.apache.org/jira/browse/HIVE-11278.
-            latestPart.getTPartition().getSd().setOutputFormat(
-              HiveFileFormatUtils.getOutputFormatSubstitute(partition.getOutputFormatClass(), false).getName());
+            latestPart.setOutputFormatClass(partition.getOutputFormatClass().asSubclass(HiveOutputFormat.class));
             latestPart.getTPartition().getSd().getSerdeInfo()
               .setSerializationLib(partition.getTPartition().getSd().getSerdeInfo().getSerializationLib());
             latestParts.add(latestPart);
@@ -2061,7 +2058,7 @@ public class CubeMetastoreClient {
     }
     hiveTable.getTTable().getParameters().putAll(cubeTable.getProperties());
     try {
-      getClient().alterTable(table, hiveTable);
+      getClient().alterTable(table, hiveTable, null);
     } catch (InvalidOperationException e) {
       throw new HiveException(e);
     }
@@ -2074,7 +2071,7 @@ public class CubeMetastoreClient {
 
   public void alterHiveTable(String table, Table hiveTable) throws HiveException {
     try {
-      getClient().alterTable(table, hiveTable);
+      getClient().alterTable(table, hiveTable, null);
     } catch (InvalidOperationException e) {
       throw new HiveException(e);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
index 9318603..012081a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
@@ -370,7 +370,7 @@ public abstract class Storage extends AbstractCubeTable implements PartitionMeta
    */
   public void updatePartition(Hive client, String fact, Partition partition)
     throws InvalidOperationException, HiveException {
-    client.alterPartition(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partition);
+    client.alterPartition(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partition, null);
   }
 
   /**
@@ -385,7 +385,7 @@ public abstract class Storage extends AbstractCubeTable implements PartitionMeta
     throws InvalidOperationException, HiveException {
     boolean success = false;
     try {
-      client.alterPartitions(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partitions);
+      client.alterPartitions(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partitions, null);
       success = true;
     } finally {
       if (success) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index c9b1475..6f016f2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -682,7 +682,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   }
 
   public void setLimitValue(Integer value) {
-    qb.getParseInfo().setDestLimit(getClause(), value);
+    qb.getParseInfo().setDestLimit(getClause(), 0, value);
   }
 
   private String getStorageStringWithAlias(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
@@ -1144,12 +1144,11 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   }
 
   public String getInsertClause() {
-    String insertString = "";
     ASTNode destTree = qb.getParseInfo().getDestForClause(clauseName);
     if (destTree != null && ((ASTNode) (destTree.getChild(0))).getToken().getType() != TOK_TMP_FILE) {
-      insertString = "INSERT OVERWRITE" + HQLParser.getString(qb.getParseInfo().getDestForClause(clauseName));
+      return "INSERT OVERWRITE" + HQLParser.getString(destTree);
     }
-    return insertString;
+    return "";
   }
 
   public void addExprToAlias(ASTNode expr, ASTNode alias) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
index ebbe404..fc96055 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.parse.*;
 
 import lombok.Getter;
@@ -39,7 +40,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   private QB cubeQB;
 
   public CubeSemanticAnalyzer(Configuration queryConf, HiveConf hiveConf) throws SemanticException {
-    super(hiveConf);
+    super(new QueryState(hiveConf));
     this.queryConf = queryConf;
     this.hiveConf = hiveConf;
     setupRules();
@@ -52,7 +53,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
 
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    reset();
+    reset(true);
     cubeQB = new QB(null, null, false);
 
     if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
@@ -65,7 +66,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
       }
     }
     // analyzing from the ASTNode.
-    if (!doPhase1(ast, cubeQB, initPhase1Ctx())) {
+    if (!doPhase1(ast, cubeQB, initPhase1Ctx(), null)) {
       // if phase1Result false return
       return;
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 1f6b66c..6c2a168 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -545,17 +545,16 @@ public final class HQLParser {
       }
 
     } else if (TOK_DIR == rootType) {
-      buf.append(" directory ");
+      StringBuilder sb = new StringBuilder();
+      boolean local = false;
       for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
-      }
-
-    } else if (TOK_LOCAL_DIR == rootType) {
-      buf.append(" local directory ");
-      for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        if (root.getChild(i).getType() == KW_LOCAL) {
+          local = true;
+        } else {
+          toInfixString((ASTNode) root.getChild(i), sb);
+        }
       }
-
+      buf.append(local ? " local": "").append(" directory ").append(sb);
     } else if (TOK_TAB == rootType) {
       buf.append(" table ");
       for (int i = 0; i < root.getChildCount(); i++) {
@@ -766,8 +765,13 @@ public final class HQLParser {
       assert (node.getChildCount() != 0);
       if (node.getChild(0).getType() == HiveParser.Identifier) {
         String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
-        if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
-          return true;
+        try {
+          if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
+            return true;
+          }
+        } catch (SemanticException e) {
+          log.error("Error trying to find whether {} is aggregate.", getString(node), e);
+          return false;
         }
       }
     }
@@ -782,8 +786,13 @@ public final class HQLParser {
       assert (node.getChildCount() != 0);
       if (node.getChild(0).getType() == HiveParser.Identifier) {
         String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
-        if (FunctionRegistry.getGenericUDAFResolver(functionName) == null) {
-          return true;
+        try {
+          if (FunctionRegistry.getGenericUDAFResolver(functionName) == null) {
+            return true;
+          }
+        } catch (SemanticException e) {
+          log.error("Error trying to find whether {} is udf node.", getString(node), e);
+          return false;
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 02571ee..1ed6258 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -1187,7 +1187,7 @@ public class TestCubeMetastoreClient {
 
     CubeFactTable factTable = new CubeFactTable(Hive.get(conf).getTable(factName));
     factTable.alterColumn(new FieldSchema("testFactColAdd", "int", "test add column"));
-    factTable.alterColumn(new FieldSchema("msr3", "int", "test alter column"));
+    factTable.alterColumn(new FieldSchema("msr1", "float", "test alter column"));
     factTable.alterWeight(100L);
     Map<String, String> newProp = getHashMap("new.prop", "val");
     factTable.addProperties(newProp);
@@ -1210,13 +1210,18 @@ public class TestCubeMetastoreClient {
     assertTrue(altered.getUpdatePeriods().get(c2).contains(HOURLY));
     assertTrue(altered.getCubeName().equalsIgnoreCase(CUBE_NAME.toLowerCase()));
     boolean contains = false;
+    boolean msr1Altered = false;
     for (FieldSchema column : altered.getColumns()) {
       if (column.getName().equals("testfactcoladd") && column.getType().equals("int")) {
         contains = true;
         break;
       }
+      if (column.getName().equals("msr1") && column.getType().equals("float")) {
+        msr1Altered = true;
+      }
     }
-    assertTrue(contains);
+    assertTrue(contains, "column did not get added");
+    assertTrue(msr1Altered, "measure type did not get altered");
 
     // alter storage table desc
     String c1TableName = getFactOrDimtableStorageTableName(factName, c1);
@@ -2530,7 +2535,7 @@ public class TestCubeMetastoreClient {
     client.createCubeDimensionTable(zipDim.getName(), dimTblName, dimColumns, 100L, dumpPeriods, null, storageTables);
 
     CubeDimensionTable dimTable = client.getDimensionTable(dimTblName);
-    dimTable.alterColumn(new FieldSchema("testAddDim", "string", "test add column"));
+    dimTable.alterColumn(new FieldSchema("testAddDim", "int", "test add column"));
 
     List<CubeDimensionTable> tbls = client.getAllDimensionTables(zipDim);
     boolean found = false;
@@ -2549,7 +2554,7 @@ public class TestCubeMetastoreClient {
     List<FieldSchema> columns = altered.getColumns();
     boolean contains = false;
     for (FieldSchema column : columns) {
-      if (column.getName().equals("testadddim") && column.getType().equals("string")) {
+      if (column.getName().equals("testadddim") && column.getType().equals("int")) {
         contains = true;
         break;
       }
@@ -2557,13 +2562,13 @@ public class TestCubeMetastoreClient {
     assertTrue(contains);
 
     // Test alter column
-    dimTable.alterColumn(new FieldSchema("testAddDim", "int", "change type"));
+    dimTable.alterColumn(new FieldSchema("testAddDim", "float", "change type"));
     client.alterCubeDimensionTable(dimTblName, dimTable, storageTables);
 
     altered = new CubeDimensionTable(Hive.get(conf).getTable(dimTblName));
     boolean typeChanged = false;
     for (FieldSchema column : altered.getColumns()) {
-      if (column.getName().equals("testadddim") && column.getType().equals("int")) {
+      if (column.getName().equals("testadddim") && column.getType().equals("float")) {
         typeChanged = true;
         break;
       }
@@ -2586,7 +2591,7 @@ public class TestCubeMetastoreClient {
     assertEquals(alteredC1Table.getInputFormatClass(), SequenceFileInputFormat.class);
     boolean storageTblColAltered = false;
     for (FieldSchema column : alteredC1Table.getAllCols()) {
-      if (column.getName().equals("testadddim") && column.getType().equals("int")) {
+      if (column.getName().equals("testadddim") && column.getType().equals("float")) {
         storageTblColAltered = true;
         break;
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index bc4f2a1..d17c18f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -469,23 +469,23 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   @Test
   public void testMultiFactQueryWithExprOnDimsWithoutAliases() throws Exception {
     String hqlQuery =
-      rewrite("select func1(dim1), func2(dim1), msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE, conf);
+      rewrite("select reverse(dim1), ltrim(dim1), msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE, conf);
     String expected1 =
-      getExpectedQuery(cubeName, "select func1(basecube.dim1) as `expr1`, func2(basecube.dim1)  as `expr2`,"
+      getExpectedQuery(cubeName, "select reverse(basecube.dim1) as `expr1`, ltrim(basecube.dim1)  as `expr2`,"
         + " sum(basecube.msr12) as `msr12` FROM ", null,
-        " group by func1(basecube.dim1), func2(basecube.dim1)",
+        " group by reverse(basecube.dim1), ltrim(basecube.dim1)",
         getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 =
-      getExpectedQuery(cubeName, "select func1(basecube.dim1) as `expr1`, func2(basecube.dim1)  as `expr2`,"
+      getExpectedQuery(cubeName, "select reverse(basecube.dim1) as `expr1`, ltrim(basecube.dim1)  as `expr2`,"
         + " round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
-        " group by func1(basecube.dim1), func2(basecube.dim1)",
+        " group by reverse(basecube.dim1), ltrim(basecube.dim1)",
         getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
-    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.expr1, mq2.expr1) `func1(dim1)`,"
-      + " coalesce(mq1.expr2, mq2.expr2) `func2(dim1)`, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
-      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.expr1, mq2.expr1) `func1(dim1)`,"
-        + " coalesce(mq1.expr2, mq2.expr2) `func2(dim1)`, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "),
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.expr1, mq2.expr1) `reverse(dim1)`,"
+      + " coalesce(mq1.expr2, mq2.expr2) `ltrim(dim1)`, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.expr1, mq2.expr1) `reverse(dim1)`,"
+        + " coalesce(mq1.expr2, mq2.expr2) `ltrim(dim1)`, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "),
       hqlQuery);
     assertTrue(hqlQuery.contains("mq1 full outer join ")
       && hqlQuery.endsWith("mq2 on mq1.expr1 <=> mq2.expr1 AND mq1.expr2 <=> mq2.expr2"), hqlQuery);
@@ -494,23 +494,23 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   @Test
   public void testMultiFactQueryWithDirectMsr() throws Exception {
     String hqlQuery =
-      rewrite("select func1(dim1), directMsrExpr as directMsr, roundedmsr2 from basecube where " + TWO_DAYS_RANGE,
+      rewrite("select reverse(dim1), directMsrExpr as directMsr, roundedmsr2 from basecube where " + TWO_DAYS_RANGE,
         conf);
     String expected1 =
-      getExpectedQuery(cubeName, "select func1(basecube.dim1) as `expr1`, max(basecube.msr13) + count(basecube . msr14)"
-        + " as `expr2` FROM ", null,
-        " group by func1(basecube.dim1)", getWhereForDailyAndHourly2days(cubeName, "C1_testFact3_BASE"));
+      getExpectedQuery(cubeName, "select reverse(basecube.dim1) as `expr1`, "
+        + "max(basecube.msr13) + count(basecube . msr14) as `expr2` FROM ", null,
+        " group by reverse(basecube.dim1)", getWhereForDailyAndHourly2days(cubeName, "C1_testFact3_BASE"));
     String expected2 =
-      getExpectedQuery(cubeName, "select func1(basecube.dim1) as expr1, round(sum(basecube.msr2)/1000) as `roundedmsr2`"
-        + " FROM ", null, " group by func1(basecube.dim1)",
+      getExpectedQuery(cubeName, "select reverse(basecube.dim1) as expr1, "
+        + "round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null, " group by reverse(basecube.dim1)",
         getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
-      "select coalesce(mq1.expr1, mq2.expr1) `func1(dim1)`, mq2.expr2 `directmsr`, mq1.roundedmsr2 roundedmsr2 "
+      "select coalesce(mq1.expr1, mq2.expr1) `reverse(dim1)`, mq2.expr2 `directmsr`, mq1.roundedmsr2 roundedmsr2 "
         + "from ")
       || hqlQuery.toLowerCase().startsWith(
-        "select coalesce(mq1.expr1, mq2.expr1) `func1(dim1)`, mq1.expr2 `directmsr`, mq2.roundedmsr2 roundedmsr2 "
+        "select coalesce(mq1.expr1, mq2.expr1) `reverse(dim1)`, mq1.expr2 `directmsr`, mq2.roundedmsr2 roundedmsr2 "
           + "from "),
       hqlQuery.toLowerCase());
     assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.expr1 <=> mq2.expr1"),

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
index e23290f..861d4f7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -42,6 +43,12 @@ import lombok.extern.slf4j.Slf4j;
 public class TestHQLParser {
 
   HiveConf conf = new HiveConf();
+
+  {
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS, false);
+    SessionState.start(conf);
+  }
+
   @Test
   public void testGroupByOrderByGetString() throws Exception {
     String query = "SELECT a,b, sum(c) FROM tab GROUP BY a,f(b), d+e ORDER BY a, g(b), e/100";
@@ -216,35 +223,19 @@ public class TestHQLParser {
 
   @Test
   public void testOrderbyBrackets() throws Exception {
-    String query = "SELECT id from citytable order by ((citytable.id) asc)";
+    String query = "SELECT id from citytable order by (citytable.id) asc";
     // String hql = rewrite(driver, query);
     ASTNode tree = HQLParser.parseHQL(query, conf);
     ASTNode orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     String reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED0:" + reconstructed);
-    // Assert.assertEquals("(( citytable  .  id ) asc )", reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
-
-    String query2 = "SELECT id from citytable order by (citytable.id asc)";
-    tree = HQLParser.parseHQL(query2, conf);
-    orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
-    reconstructed = HQLParser.getString(orderByTree);
-    System.out.println("RECONSTRUCTED1:" + reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
-
+    Assert.assertEquals(reconstructed, "citytable.id asc");
     String query3 = "SELECT id, name from citytable order by citytable.id asc, citytable.name desc";
     tree = HQLParser.parseHQL(query3, conf);
     orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED2:" + reconstructed);
-    HQLParser.parseHQL("SELECT id, name FROM citytable ORDER BY " + reconstructed, conf);
-
-    String query4 = "SELECT id from citytable order by citytable.id";
-    tree = HQLParser.parseHQL(query4, conf);
-    orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
-    reconstructed = HQLParser.getString(orderByTree);
-    System.out.println("RECONSTRUCTED3:" + reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
+    Assert.assertEquals(reconstructed, "citytable.id asc, citytable.name desc");
   }
 
   @Test
@@ -273,10 +264,10 @@ public class TestHQLParser {
   public void testAliasShouldBeQuoted() throws Exception {
     Assert.assertEquals(getSelectStrForQuery("select id as identity from sample_dim"), "id as `identity`");
     Assert.assertEquals(getSelectStrForQuery("select id as `column identity` from sample_dim"),
-        "id as `column identity`");
+      "id as `column identity`");
     Assert.assertEquals(getSelectStrForQuery("select id identity from sample_dim"), "id as `identity`");
     Assert.assertEquals(getSelectStrForQuery("select id `column identity` from sample_dim"),
-        "id as `column identity`");
+      "id as `column identity`");
   }
 
   private String getSelectStrForQuery(String query) throws Exception {
@@ -387,7 +378,7 @@ public class TestHQLParser {
 
   @DataProvider
   public Object[][] nAryFlatteningDataProvider() {
-    return new Object[][] {
+    return new Object[][]{
       {"a", "a"},
       {"a or b", "a or b"},
       {"a or b or c or d", "a or b or c or d"},
@@ -408,7 +399,7 @@ public class TestHQLParser {
 
   @DataProvider
   public Object[][] colsInExpr() {
-    return new Object[][] {
+    return new Object[][]{
       {" t1.c1", new String[]{}}, // simple selection
       {" cie.c5", new String[]{"c5"}}, // simple selection
       {" fun1(cie.c4)", new String[]{"c4"}}, // simple selection
@@ -436,7 +427,7 @@ public class TestHQLParser {
 
   @DataProvider
   public Object[][] primitiveBool() {
-    return new Object[][] {
+    return new Object[][]{
       {" t1.c1", false},
       {" t1.c1 = 24", true},
       {" t1.c1 >= 24", true},
@@ -461,7 +452,7 @@ public class TestHQLParser {
 
   @DataProvider
   public Object[][] primitiveBoolFunc() {
-    return new Object[][] {
+    return new Object[][]{
       {" t1.c1", false},
       {" t1.c1 = 24", false},
       {" t1.c1 >= 24", false},
@@ -483,4 +474,19 @@ public class TestHQLParser {
     boolean actual = HQLParser.isPrimitiveBooleanFunction(inputAST);
     Assert.assertEquals(actual, expected, "Received " + actual + " for input:" + input);
   }
+
+  @DataProvider
+  public Object[][] dirDataProvider() {
+    return new Object[][]{
+      {"directory 'a'"},
+      {"local directory 'a'"},
+    };
+  }
+
+  @Test(dataProvider = "dirDataProvider")
+  public void testLocalDirectory(String dirString) throws LensException {
+    String expr = "insert overwrite " + dirString + " select * from table";
+    ASTNode tree = HQLParser.parseHQL(expr, conf);
+    Assert.assertEquals(HQLParser.getString((ASTNode) tree.getChild(1).getChild(0)), dirString);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
index b50ebd2..1dd353f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
@@ -225,13 +225,16 @@ public class TestQuery {
     } else if (expected.query == null) {
       return false;
     }
-    boolean equals = false;
+    return stringEquals(expected) || astEquals(expected);
+  }
+
+  private boolean astEquals(TestQuery expected) {
     try {
-      equals = equalsAST(this.getAST(), expected.getAST());
+      return equalsAST(this.getAST(), expected.getAST());
     } catch (LensException e) {
       log.error("AST not valid", e);
+      return false;
     }
-    return equals || stringEquals(expected);
   }
 
   private boolean stringEquals(TestQuery expected) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/java/org/apache/lens/cube/query/cost/TestFactPartitionBasedQueryCostCalculator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/query/cost/TestFactPartitionBasedQueryCostCalculator.java b/lens-cube/src/test/java/org/apache/lens/cube/query/cost/TestFactPartitionBasedQueryCostCalculator.java
index 8a60224..262d452 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/query/cost/TestFactPartitionBasedQueryCostCalculator.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/query/cost/TestFactPartitionBasedQueryCostCalculator.java
@@ -56,7 +56,7 @@ public class TestFactPartitionBasedQueryCostCalculator {
     when(driver.getConf()).thenReturn(new Configuration());
     queryContext = mock(AbstractQueryContext.class);
 
-    ImmutableMap<String, Double> tableWeights = mock(ImmutableMap.class);
+    ImmutableMap<String, Double> tableWeights = new ImmutableMap.Builder<String, Double>().build();
 
     FactPartition fp1 = mockFactPartition(DAILY, tableWeights, 0.7);
     FactPartition fp2 = mockFactPartition(HOURLY, tableWeights, 0.8);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-cube/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/resources/hive-site.xml b/lens-cube/src/test/resources/hive-site.xml
index 8456f8b..d21e61c 100644
--- a/lens-cube/src/test/resources/hive-site.xml
+++ b/lens-cube/src/test/resources/hive-site.xml
@@ -36,6 +36,11 @@
   </property>
 
   <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
     <name>hive.querylog.location</name>
     <value>${project.build.directory}/query_logs</value>
     <description>Location of the structured hive logs</description>
@@ -47,4 +52,9 @@
     <description>JDBC connect string for a JDBC metastore</description>
   </property>
 
+  <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/pom.xml
----------------------------------------------------------------------
diff --git a/lens-driver-es/pom.xml b/lens-driver-es/pom.xml
index 89736ab..b00e78b 100644
--- a/lens-driver-es/pom.xml
+++ b/lens-driver-es/pom.xml
@@ -19,60 +19,81 @@
   under the License.
 
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.lens</groupId>
-        <artifactId>apache-lens</artifactId>
-        <version>2.6.0-beta-SNAPSHOT</version>
-    </parent>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.lens</groupId>
+    <artifactId>apache-lens</artifactId>
+    <version>2.6.0-beta-SNAPSHOT</version>
+  </parent>
 
-    <artifactId>lens-driver-es</artifactId>
-    <packaging>jar</packaging>
-    <description>ES execution driver</description>
+  <artifactId>lens-driver-es</artifactId>
+  <packaging>jar</packaging>
+  <description>ES execution driver</description>
 
-    <name>Lens Elastic Search Driver</name>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-service</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.lens</groupId>
-            <artifactId>lens-cube</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <artifactId>lens-api</artifactId>
-            <groupId>org.apache.lens</groupId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.datatype</groupId>
-            <artifactId>jackson-datatype-guava</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.searchbox</groupId>
-            <artifactId>jest</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpclient</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpcore</artifactId>
-        </dependency>
-    </dependencies>
+  <name>Lens Elastic Search Driver</name>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lens</groupId>
+      <artifactId>lens-cube</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <artifactId>lens-api</artifactId>
+      <groupId>org.apache.lens</groupId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.datatype</groupId>
+      <artifactId>jackson-datatype-guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.testng</groupId>
+      <artifactId>testng</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.searchbox</groupId>
+      <artifactId>jest</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
+  </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/main/java/org/apache/lens/driver/es/client/jest/JestResultSetTransformer.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/main/java/org/apache/lens/driver/es/client/jest/JestResultSetTransformer.java b/lens-driver-es/src/main/java/org/apache/lens/driver/es/client/jest/JestResultSetTransformer.java
index 38d91f9..d8644ba 100644
--- a/lens-driver-es/src/main/java/org/apache/lens/driver/es/client/jest/JestResultSetTransformer.java
+++ b/lens-driver-es/src/main/java/org/apache/lens/driver/es/client/jest/JestResultSetTransformer.java
@@ -26,8 +26,8 @@ import org.apache.lens.driver.es.client.ESResultSet;
 import org.apache.lens.server.api.driver.LensResultSetMetadata;
 
 import org.apache.commons.lang3.Validate;
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.ColumnDescriptor;
-import org.apache.hive.service.cli.Type;
 import org.apache.hive.service.cli.TypeDescriptor;
 
 import com.google.common.collect.Lists;

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
index ab6f22b..61ee84a 100644
--- a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
+++ b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
@@ -21,6 +21,8 @@ package org.apache.lens.driver.es;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.testng.annotations.BeforeTest;
 
@@ -37,6 +39,9 @@ public abstract class ESDriverTest {
     esDriverConfig = new ESDriverConfig(config);
     driver.configure(config, "es", "es1");
     mockClientES = (MockClientES) driver.getESClient();
+    HiveConf conf = new HiveConf();
+    conf.addResource(config);
+    SessionState.start(conf);
   }
 
   protected abstract void initializeConfig(Configuration config);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/test/java/org/apache/lens/driver/es/MockClientES.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/java/org/apache/lens/driver/es/MockClientES.java b/lens-driver-es/src/test/java/org/apache/lens/driver/es/MockClientES.java
index 77300f9..27b577a 100644
--- a/lens-driver-es/src/test/java/org/apache/lens/driver/es/MockClientES.java
+++ b/lens-driver-es/src/test/java/org/apache/lens/driver/es/MockClientES.java
@@ -26,8 +26,8 @@ import org.apache.lens.driver.es.client.ESResultSet;
 import org.apache.lens.server.api.driver.LensResultSetMetadata;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.ColumnDescriptor;
-import org.apache.hive.service.cli.Type;
 import org.apache.hive.service.cli.TypeDescriptor;
 
 import com.google.common.collect.ImmutableMap;

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/test/java/org/apache/lens/driver/es/ResultSetTransformationTest.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ResultSetTransformationTest.java b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ResultSetTransformationTest.java
index 0b78639..bb8638d 100644
--- a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ResultSetTransformationTest.java
+++ b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ResultSetTransformationTest.java
@@ -31,8 +31,8 @@ import org.apache.lens.server.api.driver.LensResultSetMetadata;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.ColumnDescriptor;
-import org.apache.hive.service.cli.Type;
 import org.apache.hive.service.cli.TypeDescriptor;
 
 import org.testng.annotations.BeforeTest;

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/resources/hive-site.xml b/lens-driver-es/src/test/resources/hive-site.xml
new file mode 100644
index 0000000..e49c876
--- /dev/null
+++ b/lens-driver-es/src/test/resources/hive-site.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
+    <name>hive.querylog.location</name>
+    <value>${project.build.directory}/query_logs</value>
+    <description>Location of the structured hive logs</description>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-es/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/resources/logback.xml b/lens-driver-es/src/test/resources/logback.xml
new file mode 100644
index 0000000..c32886d
--- /dev/null
+++ b/lens-driver-es/src/test/resources/logback.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<configuration>
+  <appender name="TEST_LOG_FILE" class="ch.qos.logback.core.FileAppender">
+    <file>target/test.log</file>
+    <encoder>
+      <pattern>%d{dd MMM yyyy HH:mm:ss,SSS} [%t] %-5p %c %L - %m%n</pattern>
+    </encoder>
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>INFO</level>
+    </filter>
+  </appender>
+  <root level="INFO">
+    <appender-ref ref="TEST_LOG_FILE"/>
+  </root>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/pom.xml
----------------------------------------------------------------------
diff --git a/lens-driver-hive/pom.xml b/lens-driver-hive/pom.xml
index e80fab8..9809953 100644
--- a/lens-driver-hive/pom.xml
+++ b/lens-driver-hive/pom.xml
@@ -43,6 +43,7 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
+          <argLine>-Xms256m -Xmx512m -XX:PermSize=256m -XX:MaxPermSize=256m</argLine>
           <systemPropertyVariables>
             <hadoop.bin.path>${pom.basedir}/testutils/hadoop</hadoop.bin.path>
           </systemPropertyVariables>
@@ -125,6 +126,22 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
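
Editor's note: the new test-scoped datanucleus and derby artifacts back the embedded Derby metastore that the test hive-site.xml files in this patch point at. A hedged, illustrative snippet of what such a jdbc:derby URL resolves to once the derby jar is on the classpath (JDBC 4 driver auto-loading); it is not something the patch itself runs, and it assumes the working directory matches the relative target/ path:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.SQLException;

  public class DerbyMetastoreCheck {
    public static void main(String[] args) throws SQLException {
      // Same style of embedded Derby database that DataNucleus creates for the Hive metastore in tests.
      try (Connection c = DriverManager.getConnection(
          "jdbc:derby:;databaseName=target/metastore_db;create=true")) {
        System.out.println("connected: " + !c.isClosed());
      }
    }
  }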

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
index 0b46485..5f7252a 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
@@ -18,24 +18,200 @@
  */
 package org.apache.lens.driver.hive;
 
-import java.io.IOException;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.cli.*;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.apache.hive.service.rpc.thrift.TCLIService;
 
 /**
  * The Class EmbeddedThriftConnection.
  */
 public class EmbeddedThriftConnection implements ThriftConnection {
+  public static class SessionStateContext implements AutoCloseable {
+    /**
+     * This is needed because HiveServer2 runs in embedded mode here. Opening a hive session starts
+     * a new session state and discards the previous one, since everything runs inside the same JVM.
+     * All other session operations (getting status, fetching results, etc.) are wrapped in an
+     * acquire-release block in HiveSessionImpl, and the release clears the session state for the
+     * current thread. Because everything also runs on a single thread, the caller's session state
+     * is cleared for subsequent operations as well and must be restored here before returning
+     * so that tests can proceed.
+     */
+    private SessionState state = SessionState.get();
+
+    @Override
+    public void close() {
+      if (state != null && !state.equals(SessionState.get())) {
+        SessionState.setCurrentSessionState(state);
+      }
+    }
+  }
+  //SUSPEND CHECKSTYLE CHECK InnerAssignmentCheck
+  public static class EmbeddedThriftCLIServiceClient extends ThriftCLIServiceClient {
+
+    public EmbeddedThriftCLIServiceClient(TCLIService.Iface cliService) {
+      super(cliService);
+    }
+
+    public SessionHandle openSession(String username, String password,
+      Map<String, String> configuration) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.openSession(username, password, configuration);
+      }
+    }
+
+    public SessionHandle openSessionWithImpersonation(String username, String password,
+      Map<String, String> configuration, String delegationToken) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.openSessionWithImpersonation(username, password, configuration, delegationToken);
+      }
+    }
+
+    public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        super.closeSession(sessionHandle);
+      }
+    }
+
+    public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getInfo(sessionHandle, infoType);
+      }
+    }
+
+    public OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+      Map<String, String> confOverlay) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.executeStatement(sessionHandle, statement, confOverlay);
+      }
+    }
+
+    public OperationHandle executeStatementAsync(SessionHandle sessionHandle,
+      String statement, Map<String, String> confOverlay) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.executeStatementAsync(sessionHandle, statement, confOverlay);
+      }
+    }
+
+    public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getTypeInfo(sessionHandle);
+      }
+    }
+
+    public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getCatalogs(sessionHandle);
+      }
+    }
+
+    public OperationHandle getSchemas(SessionHandle sessionHandle,
+      String catalogName, String schemaName) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getSchemas(sessionHandle, catalogName, schemaName);
+      }
+    }
+
+    public OperationHandle getTables(SessionHandle sessionHandle,
+      String catalogName, String schemaName, String tableName, List<String> tableTypes) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getTables(sessionHandle, catalogName, schemaName, tableName, tableTypes);
+      }
+    }
+
+    public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getTableTypes(sessionHandle);
+      }
+    }
+
+    public OperationHandle getColumns(SessionHandle sessionHandle,
+      String catalogName, String schemaName, String tableName, String columnName) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getColumns(sessionHandle, catalogName, schemaName, tableName, columnName);
+      }
+    }
+
+    public OperationHandle getFunctions(SessionHandle sessionHandle,
+      String catalogName, String schemaName, String functionName) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getFunctions(sessionHandle, catalogName, schemaName, functionName);
+      }
+    }
+
+    public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        super.cancelOperation(opHandle);
+      }
+    }
+
+    public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        super.closeOperation(opHandle);
+      }
+    }
+
+    public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getResultSetMetadata(opHandle);
+      }
+    }
+
+    public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.fetchResults(opHandle);
+      }
+    }
+
+    public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+      long maxRows, FetchType fetchType) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.fetchResults(opHandle, orientation, maxRows, fetchType);
+      }
+    }
+
+    public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+      String owner, String renewer) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.getDelegationToken(sessionHandle, authFactory, owner, renewer);
+      }
+    }
+
+    public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+      String tokenStr) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        super.cancelDelegationToken(sessionHandle, authFactory, tokenStr);
+      }
+    }
+
+    public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+      String tokenStr) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        super.renewDelegationToken(sessionHandle, authFactory, tokenStr);
+      }
+    }
+
+    @Override
+    public SessionHandle openSession(String username, String password) throws HiveSQLException {
+      try (SessionStateContext ignored = new SessionStateContext()) {
+        return super.openSession(username, password);
+      }
+    }
+  }
 
   /** The client. */
-  private ThriftCLIServiceClient client;
+  private EmbeddedThriftCLIServiceClient client;
 
   /** The connected. */
-  private boolean connected;
+  private EmbeddedThriftBinaryCLIService service;
 
   /*
    * (non-Javadoc)
@@ -44,24 +220,34 @@ public class EmbeddedThriftConnection implements ThriftConnection {
    */
   @Override
   public ThriftCLIServiceClient getClient() throws LensException {
-    if (!connected) {
-      client = new ThriftCLIServiceClient(new EmbeddedThriftBinaryCLIService());
-      connected = true;
+    if (client == null) {
+      client = new EmbeddedThriftCLIServiceClient(getService());
     }
     return client;
   }
 
+  private EmbeddedThriftBinaryCLIService getService() {
+    if (service == null) {
+      service = new EmbeddedThriftBinaryCLIService();
+    }
+    return service;
+  }
+
   /*
    * (non-Javadoc)
    *
    * @see java.io.Closeable#close()
    */
   @Override
-  public void close() throws IOException {
+  public void close() {
     // Does nothing
   }
 
   @Override
   public void init(HiveConf conf, String user) {
+    try (SessionStateContext ignored = new SessionStateContext()) {
+      getService().init(conf);
+    }
   }
+  //RESUME CHECKSTYLE CHECK InnerAssignmentCheck
 }
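
Editor's note: a usage sketch of the rewritten connection (made-up user name, minimal error handling). Every call goes through the wrapping client, so the caller's SessionState survives the acquire/release cycle that HiveSessionImpl performs per operation:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hive.service.cli.SessionHandle;

  public class EmbeddedConnectionSketch {
    public static void main(String[] args) throws Exception {
      HiveConf conf = new HiveConf();
      EmbeddedThriftConnection connection = new EmbeddedThriftConnection();
      connection.init(conf, "test_user");      // initializes the embedded HS2 service once
      SessionHandle session = connection.getClient().openSession("test_user", "");
      // ... execute statements / fetch results through connection.getClient() ...
      connection.getClient().closeSession(session);
      connection.close();                      // intentionally a no-op, see above
    }
  }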

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 19a010e..1463cc2 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -25,10 +25,7 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.ObjectInput;
 import java.io.ObjectOutput;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.Lock;
@@ -61,17 +58,18 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.TaskStatus;
+import org.apache.hadoop.hive.ql.QueryDisplay.TaskDisplay;
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.cli.*;
-import org.apache.hive.service.cli.thrift.TOperationHandle;
-import org.apache.hive.service.cli.thrift.TProtocolVersion;
-import org.apache.hive.service.cli.thrift.TSessionHandle;
-
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
+import org.apache.hive.service.rpc.thrift.TOperationHandle;
+import org.apache.hive.service.rpc.thrift.TProtocolVersion;
+import org.apache.hive.service.rpc.thrift.TSessionHandle;
 
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableSet;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -326,10 +324,10 @@ public class HiveDriver extends AbstractLensDriver {
    */
   public HiveDriver() throws LensException {
     this.sessionLock = new ReentrantLock();
-    lensToHiveSession = new HashMap<String, SessionHandle>();
-    opHandleToSession = new ConcurrentHashMap<OperationHandle, SessionHandle>();
-    orphanedHiveSessions = new ConcurrentLinkedQueue<SessionHandle>();
-    resourcesAddedForSession = new HashMap<SessionHandle, Boolean>();
+    lensToHiveSession = new HashMap<>();
+    opHandleToSession = new ConcurrentHashMap<>();
+    orphanedHiveSessions = new ConcurrentLinkedQueue<>();
+    resourcesAddedForSession = new HashMap<>();
     connectionExpiryThread.setDaemon(true);
     connectionExpiryThread.setName("HiveDriver-ConnectionExpiryThread");
     connectionExpiryThread.start();
@@ -515,15 +513,13 @@ public class HiveDriver extends AbstractLensDriver {
       Configuration qdconf = ctx.getDriverConf(this);
       qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
       SessionHandle sessionHandle = getSession(ctx);
-      op = getClient().executeStatement(sessionHandle, ctx.getSelectedDriverQuery(),
-        qdconf.getValByRegex(".*"));
+      op = getClient().executeStatement(sessionHandle, ctx.getSelectedDriverQuery(), qdconf.getValByRegex(".*"));
       log.info("The hive operation handle: {}", op);
       ctx.setDriverOpHandle(op.toString());
       hiveHandles.put(ctx.getQueryHandle(), op);
       opHandleToSession.put(op, sessionHandle);
       updateStatus(ctx);
       OperationStatus status = getClient().getOperationStatus(op);
-
       if (status.getState() == OperationState.ERROR) {
         throw new LensException("Unknown error while running query " + ctx.getUserQuery());
       }
@@ -597,9 +593,9 @@ public class HiveDriver extends AbstractLensDriver {
     try {
       // Get operation status from hive server
       log.debug("GetStatus hiveHandle: {}", hiveHandle);
+      fetchLogs(hiveHandle);
       OperationStatus opStatus = getClient().getOperationStatus(hiveHandle);
       log.debug("GetStatus on hiveHandle: {} returned state:", hiveHandle, opStatus.getState().name());
-
       switch (opStatus.getState()) {
       case CANCELED:
         context.getDriverStatus().setState(DriverQueryState.CANCELED);
@@ -643,17 +639,18 @@ public class HiveDriver extends AbstractLensDriver {
       String errorMsg = null;
       if (StringUtils.isNotBlank(jsonTaskStatus)) {
         ObjectMapper mapper = new ObjectMapper();
+        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
         in = new ByteArrayInputStream(jsonTaskStatus.getBytes("UTF-8"));
-        List<TaskStatus> taskStatuses = mapper.readValue(in, new TypeReference<List<TaskStatus>>() {
+        List<TaskDisplay> taskStatuses = mapper.readValue(in, new TypeReference<List<TaskDisplay>>() {
         });
         int completedTasks = 0;
         StringBuilder errorMessage = new StringBuilder();
-        for (TaskStatus taskStat : taskStatuses) {
-          String tstate = taskStat.getTaskState();
-          if ("FINISHED_STATE".equalsIgnoreCase(tstate)) {
+        for (TaskDisplay taskStat : taskStatuses) {
+          Task.TaskState tstate = taskStat.taskState;
+          if (tstate == Task.TaskState.FINISHED) {
             completedTasks++;
           }
-          if ("FAILED_STATE".equalsIgnoreCase(tstate)) {
+          if (taskStat.getErrorMsg() != null) {
             appendTaskIds(errorMessage, taskStat);
             errorMessage.append(" has failed! ");
           }
@@ -689,15 +686,27 @@ public class HiveDriver extends AbstractLensDriver {
     }
   }
 
+  private void fetchLogs(OperationHandle opHandle) throws LensException {
+    try {
+      for (Object[] o : getClient().fetchResults(opHandle, FetchOrientation.FETCH_NEXT, -1, FetchType.LOG)) {
+        for (Object logLine : o) {
+          log.info("Update from hive: " + String.valueOf(logLine));
+        }
+      }
+    } catch (HiveSQLException e) {
+      log.error("Error fetching hive operation logs for {}", opHandle, e);
+    }
+  }
+
   /**
    * Append task ids.
    *
    * @param message  the message
    * @param taskStat the task stat
    */
-  private void appendTaskIds(StringBuilder message, TaskStatus taskStat) {
+  private void appendTaskIds(StringBuilder message, TaskDisplay taskStat) {
     message.append(taskStat.getTaskId()).append("(");
-    message.append(taskStat.getType()).append("):");
+    message.append(taskStat.getTaskType()).append("):");
     if (taskStat.getExternalHandle() != null) {
       message.append(taskStat.getExternalHandle()).append(":");
     }
@@ -728,6 +737,7 @@ public class HiveDriver extends AbstractLensDriver {
     if (opHandle != null) {
       log.info("CloseQuery hiveHandle: {}", opHandle);
       try {
+        fetchLogs(opHandle);
         getClient().closeOperation(opHandle);
       } catch (HiveSQLException e) {
         checkInvalidOperation(handle, e);
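
Editor's note: one non-obvious detail in the HiveDriver changes is that FAIL_ON_UNKNOWN_PROPERTIES is disabled so progress parsing keeps working when the TaskDisplay JSON carries fields this code does not map. A standalone sketch of the same Jackson pattern (the Target class and the JSON string are made up for illustration):

  import java.util.List;

  import com.fasterxml.jackson.core.type.TypeReference;
  import com.fasterxml.jackson.databind.DeserializationFeature;
  import com.fasterxml.jackson.databind.ObjectMapper;

  public class LenientParseSketch {
    public static class Target {      // stand-in for TaskDisplay
      public String taskId;
    }

    public static void main(String[] args) throws Exception {
      String json = "[{\"taskId\":\"Stage-1\",\"someNewField\":42}]";
      ObjectMapper mapper = new ObjectMapper();
      mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
      List<Target> tasks = mapper.readValue(json, new TypeReference<List<Target>>() { });
      System.out.println(tasks.get(0).taskId); // unknown fields are ignored instead of failing the parse
    }
  }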

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveInMemoryResultSet.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveInMemoryResultSet.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveInMemoryResultSet.java
index 4d52e22..b39f93c 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveInMemoryResultSet.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveInMemoryResultSet.java
@@ -112,7 +112,7 @@ public class HiveInMemoryResultSet extends InMemoryResultSet {
   public boolean hasNext() throws LensException {
     if (fetchedRowsItr == null || !fetchedRowsItr.hasNext()) {
       try {
-        rowSet = client.fetchResults(opHandle, orientation, fetchSize);
+        rowSet = client.fetchResults(opHandle, orientation, fetchSize, FetchType.QUERY_OUTPUT);
         orientation = FetchOrientation.FETCH_NEXT;
         noMoreResults = rowSet.numRows() == 0;
         if (noMoreResults) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
index f53e2b6..965c5b1 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -139,7 +140,20 @@ public class HiveQueryPlan extends DriverQueryPlan {
         }
         break;
       case TABLE_SCAN:
-        // no op
+        int indentation = getIndentation(explainOutput.get(i));
+        i++;
+        for (; i < explainOutput.size(); i++) {
+          if (explainOutput.get(i).trim().startsWith("alias: ")) {
+            String tableName = explainOutput.get(i).trim().substring(7);
+            if (!tableName.contains(".") && SessionState.get() != null) {
+              tableName = SessionState.get().getCurrentDatabase() + "." + tableName;
+            }
+            addTableToTablesQueried(tableName, metastore);
+          }
+          if (i + 1 < explainOutput.size() && getIndentation(explainOutput.get(i+1)) <= indentation) {
+            break;
+          }
+        }
         break;
       case PARTITION:
         String partConditionStr = null;
@@ -162,22 +176,7 @@ public class HiveQueryPlan extends DriverQueryPlan {
           if (explainOutput.get(i).trim().startsWith("name:")) {
             String table = explainOutput.get(i).trim().substring("name:".length()).trim();
             // update tables queried and weights
-            if (!tablesQueried.contains(table)) {
-              Table tbl = metastore.getTable(table, false);
-              if (tbl == null) {
-                // table not found, possible case if query is create table
-                log.info("Table {} not found while extracting plan details", table);
-                continue;
-              }
-              tablesQueried.add(table);
-              String costStr = tbl.getParameters().get(LensConfConstants.STORAGE_COST);
-
-              Double weight = 1d;
-              if (costStr != null) {
-                weight = Double.parseDouble(costStr);
-              }
-              tableWeights.put(table, weight);
-            }
+            addTableToTablesQueried(table, metastore);
 
             if (partConditionStr != null) {
               Set<String> tablePartitions = (Set<String>) partitions.get(table);
@@ -201,6 +200,34 @@ public class HiveQueryPlan extends DriverQueryPlan {
     }
   }
 
+  private void addTableToTablesQueried(String table, Hive metastore) throws HiveException {
+    if (!tablesQueried.contains(table)) {
+      Table tbl = metastore.getTable(table, false);
+      if (tbl == null) {
+        // table not found, possible case if query is create table
+        log.info("Table {} not found while extracting plan details", table);
+        return;
+      }
+      tablesQueried.add(table);
+      String costStr = tbl.getParameters().get(LensConfConstants.STORAGE_COST);
+
+      Double weight = 1d;
+      if (costStr != null) {
+        weight = Double.parseDouble(costStr);
+      }
+      tableWeights.put(table, weight);
+    }
+  }
+
+  private int getIndentation(String s) {
+    for (int i = 0; i < s.length(); i++) {
+      if (s.charAt(i) != ' ') {
+        return i - 1;
+      }
+    }
+    return s.length();
+  }
+
   /**
    * Next state.
    *
@@ -227,7 +254,7 @@ public class HiveQueryPlan extends DriverQueryPlan {
       return ParserState.GROUPBY_EXPRS;
     } else if (tr.startsWith("keys:") && state == ParserState.GROUPBY_EXPRS) {
       return ParserState.GROUPBY_KEYS;
-    } else if (tr.equals("Path -> Partition:")) {
+    } else if (tr.equals("Path -> Partition:") || tr.equals("Partition Description:")) {
       return ParserState.PARTITION_LIST;
     } else if (tr.equals("Partition") && state == ParserState.PARTITION_LIST) {
       return ParserState.PARTITION;
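
Editor's note: a worked example of the new TABLE_SCAN handling. For EXPLAIN output like the illustrative fragment below, with testdb as the current database, the alias sales_fact is qualified to testdb.sales_fact and recorded through addTableToTablesQueried. The inner loop keeps consuming lines while they are indented deeper than the TableScan line (getIndentation returns one less than the column of the first non-space character) and breaks as soon as the next line falls back to the TableScan's indentation or less:

      TableScan
        alias: sales_fact
        Statistics: ...
      Select Operator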

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
index 741294c..54885f7 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
@@ -37,9 +37,10 @@ public class RemoteThriftConnection implements ThriftConnection {
   private boolean connected;
 
   /** The hs2 client. */
-  private CLIServiceClient hs2Client;
+  private RetryingThriftCLIServiceClient.CLIServiceClientWrapper hs2Client;
 
   private HiveConf conf;
+
   /**
    * Instantiates a new remote thrift connection.
    */
@@ -86,8 +87,8 @@ public class RemoteThriftConnection implements ThriftConnection {
   @Override
   public void close() {
     connected = false;
-    if (hs2Client instanceof RetryingThriftCLIServiceClient.CLIServiceClientWrapper) {
-      ((RetryingThriftCLIServiceClient.CLIServiceClientWrapper) hs2Client).closeTransport();
+    if (hs2Client != null) {
+      hs2Client.closeTransport();
     }
   }
 }