Posted to commits@lens.apache.org by am...@apache.org on 2015/03/16 08:24:33 UTC

[1/2] incubator-lens git commit: LENS-408 : Creating HiveConf multiple times unnecessarily should be avoided (amareshwari)

Repository: incubator-lens
Updated Branches:
  refs/heads/master 8578c8fc3 -> 51c52eb3e


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
index 4de5bd7..bfb9e9d 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/EmbeddedThriftConnection.java
@@ -43,7 +43,7 @@ public class EmbeddedThriftConnection implements ThriftConnection {
    * @see org.apache.lens.driver.hive.ThriftConnection#getClient(org.apache.hadoop.hive.conf.HiveConf)
    */
   @Override
-  public ThriftCLIServiceClient getClient(HiveConf conf) throws LensException {
+  public ThriftCLIServiceClient getClient() throws LensException {
     if (!connected) {
       client = new ThriftCLIServiceClient(new EmbeddedThriftBinaryCLIService());
       connected = true;
@@ -60,4 +60,8 @@ public class EmbeddedThriftConnection implements ThriftConnection {
   public void close() throws IOException {
     // Does nothing
   }
+
+  @Override
+  public void init(HiveConf conf, String user) {
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index e7619ed..9e3c723 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -96,8 +96,11 @@ public class HiveDriver implements LensDriver {
   public static final float DAILY_PARTITION_WEIGHT_DEFAULT = 0.75f;
   public static final float HOURLY_PARTITION_WEIGHT_DEFAULT = 1.0f;
 
-  /** The driver conf. */
-  private HiveConf driverConf;
+  /** The driver conf - which will be merged with the query conf */
+  private Configuration driverConf;
+
+  /** The HiveConf - used for connecting to hive server and metastore */
+  private HiveConf hiveConf;
 
   /** The hive handles. */
   private Map<QueryHandle, OperationHandle> hiveHandles = new HashMap<QueryHandle, OperationHandle>();
@@ -314,9 +317,16 @@ public class HiveDriver implements LensDriver {
    */
   @Override
   public void configure(Configuration conf) throws LensException {
-    this.driverConf = new HiveConf(conf, HiveDriver.class);
+    this.driverConf = new Configuration(conf);
     this.driverConf.addResource("hivedriver-default.xml");
     this.driverConf.addResource("hivedriver-site.xml");
+
+    // The resources have to be added to hiveConf separately, because the HiveConf constructor
+    // overrides the hive.* properties taken from the passed conf
+    this.hiveConf = new HiveConf(conf, HiveDriver.class);
+    this.hiveConf.addResource("hivedriver-default.xml");
+    this.hiveConf.addResource("hivedriver-site.xml");
+
     connectionClass = this.driverConf.getClass(HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class,
       ThriftConnection.class);
     isEmbedded = (connectionClass.getName().equals(EmbeddedThriftConnection.class.getName()));
@@ -365,13 +375,13 @@ public class HiveDriver implements LensDriver {
       return (HiveQueryPlan) explainCtx.getDriverContext().getDriverQueryPlan(this);
     }
     LOG.info("Explain: " + explainCtx.getDriverQuery(this));
-    HiveConf explainConf = new HiveConf(explainCtx.getDriverConf(this), this.getClass());
+    Configuration explainConf = new Configuration(explainCtx.getDriverConf(this));
     explainConf.setClassLoader(explainCtx.getConf().getClassLoader());
     explainConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     final String explainQuery = "EXPLAIN EXTENDED " + explainCtx.getDriverQuery(this);
 
     QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery,
-      explainCtx.getSubmittedUser(), new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier());
+      explainCtx.getSubmittedUser(), new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
     // Get result set of explain
     HiveInMemoryResultSet inMemoryResultSet = (HiveInMemoryResultSet) execute(explainQueryCtx);
     List<String> explainOutput = new ArrayList<String>();
@@ -380,7 +390,8 @@ public class HiveDriver implements LensDriver {
     }
     closeQuery(explainQueryCtx.getQueryHandle());
     try {
-      HiveQueryPlan hqp = new HiveQueryPlan(explainOutput, null, explainConf);
+      hiveConf.setClassLoader(explainCtx.getConf().getClassLoader());
+      HiveQueryPlan hqp = new HiveQueryPlan(explainOutput, null, hiveConf);
       explainCtx.getDriverContext().setDriverQueryPlan(this, hqp);
       return hqp;
     } catch (HiveException e) {
@@ -727,22 +738,20 @@ public class HiveDriver implements LensDriver {
       if (embeddedConnection == null) {
         try {
           embeddedConnection = connectionClass.newInstance();
+          embeddedConnection.init(hiveConf, null);
         } catch (Exception e) {
           throw new LensException(e);
         }
         LOG.info("New thrift connection " + connectionClass);
       }
-      return embeddedConnection.getClient(driverConf);
+      return embeddedConnection.getClient();
     } else {
       connectionLock.lock();
       try {
-        HiveConf connectionConf = driverConf;
+        String user = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
         if (SessionState.get() != null && SessionState.get().getUserName() != null) {
-          connectionConf = new HiveConf(driverConf);
-          connectionConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER.varname, SessionState.get()
-            .getUserName());
+          user = SessionState.get().getUserName();
         }
-        String user = connectionConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
         Map<Long, ExpirableConnection> userThreads = threadConnections.get(user.toLowerCase());
         if (userThreads == null) {
           userThreads = new HashMap<Long, ExpirableConnection>();
@@ -752,6 +761,7 @@ public class HiveDriver implements LensDriver {
         if (connection == null || connection.isExpired()) {
           try {
             ThriftConnection tconn = connectionClass.newInstance();
+            tconn.init(hiveConf, user);
             connection = new ExpirableConnection(tconn, connectionExpiryTimeout);
             thriftConnExpiryQueue.offer(connection);
             userThreads.put(Thread.currentThread().getId(), connection);
@@ -766,7 +776,7 @@ public class HiveDriver implements LensDriver {
             thriftConnExpiryQueue.offer(connection);
           }
         }
-        return connection.getConnection().getClient(connectionConf);
+        return connection.getConnection().getClient();
       } finally {
         connectionLock.unlock();
       }
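
The core of the HiveDriver change, restated: the per-query conf becomes a plain Configuration (cheap to copy per query), while a single HiveConf is kept around for HiveServer2 and metastore connections. A minimal sketch of the pattern from configure() above, assuming hivedriver-default.xml and hivedriver-site.xml are visible on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

public class TwoConfSketch {
  public static void main(String[] args) {
    Configuration base = new Configuration();

    // Plain Configuration for per-query merging: copying it does not pay
    // HiveConf's cost of parsing hive-site.xml and resolving hive.* variables.
    Configuration driverConf = new Configuration(base);
    driverConf.addResource("hivedriver-default.xml");
    driverConf.addResource("hivedriver-site.xml");

    // One HiveConf, built once, for HiveServer2/metastore connections. The
    // resources are re-added because the HiveConf constructor overrides the
    // hive.* properties taken from the passed conf.
    HiveConf hiveConf = new HiveConf(base, TwoConfSketch.class);
    hiveConf.addResource("hivedriver-default.xml");
    hiveConf.addResource("hivedriver-site.xml");
  }
}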

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
index de4645c..a365fbe 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
@@ -41,6 +41,7 @@ public class RemoteThriftConnection implements ThriftConnection {
   /** The hs2 client. */
   private CLIServiceClient hs2Client;
 
+  private HiveConf conf;
   /**
    * Instantiates a new remote thrift connection.
    */
@@ -48,13 +49,19 @@ public class RemoteThriftConnection implements ThriftConnection {
 
   }
 
+  public void init(HiveConf conf, String user) {
+    // A new HiveConf is created here because the connection will be different for each user
+    this.conf = new HiveConf(conf);
+    this.conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER, user);
+  }
+
   /*
    * (non-Javadoc)
    *
    * @see org.apache.lens.driver.hive.ThriftConnection#getClient(org.apache.hadoop.hive.conf.HiveConf)
    */
   @Override
-  public CLIServiceClient getClient(HiveConf conf) throws LensException {
+  public CLIServiceClient getClient() throws LensException {
     if (!connected) {
       try {
         LOG.info("HiveDriver connecting to HiveServer @ "

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/ThriftConnection.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/ThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/ThriftConnection.java
index 8bf92f2..98046ce 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/ThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/ThriftConnection.java
@@ -33,9 +33,16 @@ public interface ThriftConnection extends Closeable {
   /**
    * Gets the client.
    *
-   * @param conf the conf
    * @return the client
    * @throws LensException the lens exception
    */
-  CLIServiceClient getClient(HiveConf conf) throws LensException;
+  CLIServiceClient getClient() throws LensException;
+
+  /**
+   * Initializes the connection with conf and user.
+   *
+   * @param conf the hive conf to use for the connection
+   * @param user the user for whom the connection is created
+   */
+  void init(HiveConf conf, String user);
 }
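
Taken together with the Embedded and Remote implementations above, the interface change moves configuration from every getClient(HiveConf) call to a one-time init(conf, user). A hypothetical usage sketch (the default HiveConf and the user name "testUser" are illustrative, and getClient() on a remote connection will actually try to reach a HiveServer2):

import java.io.IOException;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.CLIServiceClient;
import org.apache.lens.api.LensException;
import org.apache.lens.driver.hive.RemoteThriftConnection;
import org.apache.lens.driver.hive.ThriftConnection;

public class ThriftConnectionSketch {
  public static void main(String[] args) throws LensException, IOException {
    HiveConf hiveConf = new HiveConf();         // illustrative; HiveDriver passes its shared hiveConf
    ThriftConnection conn = new RemoteThriftConnection();
    conn.init(hiveConf, "testUser");            // conf and user are supplied once, up front
    CLIServiceClient client = conn.getClient(); // no per-call HiveConf any more
    conn.close();
  }
}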

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index df01ba7..d5dc9a3 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -183,9 +183,8 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    *
    * @throws SemanticException the semantic exception
    */
-  public void analyzeInternal() throws SemanticException {
-    HiveConf conf = new HiveConf();
-    CubeSemanticAnalyzer c1 = new CubeSemanticAnalyzer(conf);
+  public void analyzeInternal(Configuration conf, HiveConf hconf) throws SemanticException {
+    CubeSemanticAnalyzer c1 = new CubeSemanticAnalyzer(conf, hconf);
 
     QB qb = new QB(null, null, false);
 
@@ -481,7 +480,6 @@ public class ColumnarSQLRewriter implements QueryRewriter {
         ASTNode right = (ASTNode) node.getChild(1);
 
         ASTNode parentNode = (ASTNode) node.getParent();
-        HQLParser.printAST(parentNode);
 
         // Skip the join conditions used as "and" while building subquery
         // eg. inner join fact.id1 = dim.id and fact.id2 = dim.id
@@ -489,7 +487,6 @@ public class ColumnarSQLRewriter implements QueryRewriter {
           && parentNode.getChild(0).getChild(1).getType() == HiveParser.DOT
           && parentNode.getChild(1).getChild(0).getType() == HiveParser.DOT
           && parentNode.getChild(1).getChild(1).getType() == HiveParser.DOT) {
-          HQLParser.printAST(parentNode);
           return;
         }
 
@@ -821,9 +818,9 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    *
    * @throws SemanticException
    */
-  public void buildQuery(HiveConf queryConf) throws SemanticException {
-    analyzeInternal();
-    replaceWithUnderlyingStorage(queryConf, fromAST);
+  public void buildQuery(Configuration conf, HiveConf hconf) throws SemanticException {
+    analyzeInternal(conf, hconf);
+    replaceWithUnderlyingStorage(hconf, fromAST);
     replaceAliasInAST();
     getFilterInJoinCond(fromAST);
     getAggregateColumns(selectAST);
@@ -1029,9 +1026,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @see org.apache.lens.server.api.query.QueryRewriter#rewrite(java.lang.String, org.apache.hadoop.conf.Configuration)
    */
   @Override
-  public synchronized String rewrite(String query, Configuration conf) throws LensException {
-    HiveConf  queryConf = new HiveConf(conf, ColumnarSQLRewriter.class);
-    queryConf.setClassLoader(conf.getClassLoader());
+  public synchronized String rewrite(String query, Configuration conf, HiveConf metastoreConf) throws LensException {
     this.query = query;
     StringBuilder mergedQuery;
     rewrittenQuery.setLength(0);
@@ -1044,8 +1039,8 @@ public class ColumnarSQLRewriter implements QueryRewriter {
         String[] queries = query.toLowerCase().split("union all");
         for (int i = 0; i < queries.length; i++) {
           LOG.info("Union Query Part " + i + " : " + queries[i]);
-          ast = HQLParser.parseHQL(queries[i]);
-          buildQuery(queryConf);
+          ast = HQLParser.parseHQL(queries[i], metastoreConf);
+          buildQuery(conf, metastoreConf);
           mergedQuery = rewrittenQuery.append(" union all ");
           finalRewrittenQuery = mergedQuery.toString().substring(0, mergedQuery.lastIndexOf("union all"));
           reset();
@@ -1054,8 +1049,8 @@ public class ColumnarSQLRewriter implements QueryRewriter {
         LOG.info("Input Query : " + query);
         LOG.info("Rewritten Query :  " + queryReplacedUdf);
       } else {
-        ast = HQLParser.parseHQL(query);
-        buildQuery(queryConf);
+        ast = HQLParser.parseHQL(query, metastoreConf);
+        buildQuery(conf, metastoreConf);
         queryReplacedUdf = replaceUDFForDB(rewrittenQuery.toString());
         LOG.info("Input Query : " + query);
         LOG.info("Rewritten Query :  " + queryReplacedUdf);
@@ -1076,7 +1071,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    *
    * @param tree the AST tree
    */
-  protected void replaceWithUnderlyingStorage(HiveConf queryConf, ASTNode tree) {
+  protected void replaceWithUnderlyingStorage(HiveConf metastoreConf, ASTNode tree) {
     if (tree == null) {
       return;
     }
@@ -1090,9 +1085,9 @@ public class ColumnarSQLRewriter implements QueryRewriter {
           ASTNode dbIdentifier = (ASTNode) tree.getChild(0);
           ASTNode tableIdentifier = (ASTNode) tree.getChild(1);
           String lensTable = dbIdentifier.getText() + "." + tableIdentifier.getText();
-          Table tbl = CubeMetastoreClient.getInstance(queryConf).getHiveTable(lensTable);
-          String table = getUnderlyingTableName(queryConf, tbl);
-          String db = getUnderlyingDBName(queryConf, tbl);
+          Table tbl = CubeMetastoreClient.getInstance(metastoreConf).getHiveTable(lensTable);
+          String table = getUnderlyingTableName(tbl);
+          String db = getUnderlyingDBName(tbl);
 
           // Replace both table and db names
           if ("default".equalsIgnoreCase(db)) {
@@ -1108,15 +1103,15 @@ public class ColumnarSQLRewriter implements QueryRewriter {
         } else {
           ASTNode tableIdentifier = (ASTNode) tree.getChild(0);
           String lensTable = tableIdentifier.getText();
-          Table tbl = CubeMetastoreClient.getInstance(queryConf).getHiveTable(lensTable);
-          String table = getUnderlyingTableName(queryConf, tbl);
+          Table tbl = CubeMetastoreClient.getInstance(metastoreConf).getHiveTable(lensTable);
+          String table = getUnderlyingTableName(tbl);
           // Replace table name
           if (StringUtils.isNotBlank(table)) {
             tableIdentifier.getToken().setText(table);
           }
 
           // Add db name as a new child
-          String dbName = getUnderlyingDBName(queryConf, tbl);
+          String dbName = getUnderlyingDBName(tbl);
           if (StringUtils.isNotBlank(dbName) && !"default".equalsIgnoreCase(dbName)) {
             ASTNode dbIdentifier = new ASTNode(new CommonToken(HiveParser.Identifier, dbName));
             dbIdentifier.setParent(tree);
@@ -1128,7 +1123,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
       }
     } else {
       for (int i = 0; i < tree.getChildCount(); i++) {
-        replaceWithUnderlyingStorage(queryConf, (ASTNode) tree.getChild(i));
+        replaceWithUnderlyingStorage(metastoreConf, (ASTNode) tree.getChild(i));
       }
     }
   }
@@ -1140,7 +1135,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @return the underlying db name
    * @throws HiveException the hive exception
    */
-  String getUnderlyingDBName(HiveConf queryConf, Table tbl) throws HiveException {
+  String getUnderlyingDBName(Table tbl) throws HiveException {
     return tbl == null ? null : tbl.getProperty(LensConfConstants.NATIVE_DB_NAME);
   }
 
@@ -1151,7 +1146,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @return the underlying table name
    * @throws HiveException the hive exception
    */
-  String getUnderlyingTableName(HiveConf queryConf, Table tbl) throws HiveException {
+  String getUnderlyingTableName(Table tbl) throws HiveException {
     return tbl == null ? null : tbl.getProperty(LensConfConstants.NATIVE_TABLE_NAME);
   }
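
From the caller's side (mirroring the updated tests below), the rewriter now takes the plain query Configuration plus a separate HiveConf for metastore lookups and parsing. A minimal sketch, with the query borrowed from testNoRewrite and a default HiveConf standing in for a real metastore configuration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.lens.api.LensException;
import org.apache.lens.driver.jdbc.ColumnarSQLRewriter;

public class RewriteCallSketch {
  public static void main(String[] args) throws LensException {
    HiveConf hconf = new HiveConf();            // built once; used for metastore access and parsing
    Configuration conf = new Configuration();   // cheap per-query conf
    SessionState.start(hconf);                  // the tests start a Hive session before rewriting

    ColumnarSQLRewriter rewriter = new ColumnarSQLRewriter();
    rewriter.init(conf);
    String rewritten =
      rewriter.rewrite("select count(distinct id) from location_dim", conf, hconf);
    System.out.println(rewritten);
  }
}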
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index 1d68eb5..5236602 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -38,7 +38,6 @@ import org.apache.lens.api.query.QueryCost;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.cube.parse.HQLParser;
-import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
 import org.apache.lens.server.api.events.LensEventListener;
@@ -51,6 +50,7 @@ import org.apache.lens.server.api.query.QueryRewriter;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
@@ -377,7 +377,7 @@ public class JDBCDriver implements LensDriver {
      * (java.lang.String, org.apache.hadoop.conf.Configuration)
      */
     @Override
-    public String rewrite(String query, Configuration queryConf) throws LensException {
+    public String rewrite(String query, Configuration queryConf, HiveConf metastoreConf) throws LensException {
       return query;
     }
 
@@ -519,7 +519,7 @@ public class JDBCDriver implements LensDriver {
       CHECK_ALLOWED_QUERY);
     // check if it is select query
     try {
-      ASTNode ast = HQLParser.parseHQL(query);
+      ASTNode ast = HQLParser.parseHQL(query, ctx.getHiveConf());
       if (ast.getToken().getType() != HiveParser.TOK_QUERY) {
         throw new LensException("Not allowed statement:" + query);
       } else {
@@ -536,7 +536,7 @@ public class JDBCDriver implements LensDriver {
     checkForAllowedQuery.markSuccess();
 
     QueryRewriter rewriter = getQueryRewriter();
-    String rewrittenQuery = rewriter.rewrite(query, driverQueryConf);
+    String rewrittenQuery = rewriter.rewrite(query, driverQueryConf, ctx.getHiveConf());
     ctx.setFinalDriverQuery(this, rewrittenQuery);
     return rewrittenQuery;
   }
@@ -591,9 +591,7 @@ public class JDBCDriver implements LensDriver {
     checkConfigured();
     String explainQuery;
     String rewrittenQuery = rewriteQuery(explainCtx);
-    Configuration explainConf = new Configuration(explainCtx.getDriverConf(this));
-    explainConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER,
-      false);
+    Configuration explainConf = explainCtx.getDriverConf(this);
     String explainKeyword = explainConf.get(JDBC_EXPLAIN_KEYWORD_PARAM,
       DEFAULT_JDBC_EXPLAIN_KEYWORD);
     boolean explainBeforeSelect = explainConf.getBoolean(JDBC_EXPLAIN_KEYWORD_BEFORE_SELECT,
@@ -607,7 +605,7 @@ public class JDBCDriver implements LensDriver {
     }
     LOG.info("Explain Query : " + explainQuery);
     QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery, null,
-      new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier());
+      new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
     QueryResult result = null;
     try {
       result = executeInternal(explainQueryCtx, explainQuery);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index 0bcbe7b..dc1ecba 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -30,6 +30,7 @@ import org.apache.lens.cube.parse.HQLParser;
 import org.apache.lens.server.api.LensConfConstants;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -50,8 +51,8 @@ import org.testng.annotations.Test;
  */
 public class TestColumnarSQLRewriter {
 
-  HiveConf conf = new HiveConf();
-  HiveConf queryConf = new HiveConf();
+  HiveConf hconf = new HiveConf();
+  Configuration conf = new Configuration();
   ColumnarSQLRewriter qtest = new ColumnarSQLRewriter();
 
   /**
@@ -218,20 +219,20 @@ public class TestColumnarSQLRewriter {
   // Testing multiple queries in one instance
   public void testNoRewrite() throws ParseException, SemanticException, LensException {
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
     String query = "select count(distinct id) from location_dim";
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select count( distinct  id ) from location_dim ";
     compareQueries(expected, actual);
 
     String query2 = "select count(distinct id) from location_dim  location_dim";
-    String actual2 = qtest.rewrite(query2, conf);
+    String actual2 = qtest.rewrite(query2, conf, hconf);
     String expected2 = "select count( distinct  id ) from location_dim location_dim___location_dim";
     compareQueries(expected2, actual2);
 
     String query3 = "select count(distinct location_dim.id) from  db.location_dim location_dim";
-    String actual3 = qtest.rewrite(query3, conf);
+    String actual3 = qtest.rewrite(query3, conf, hconf);
     String expected3 = "select count( distinct ( location_dim__db_location_dim_location_dim . id )) "
       + "from db.location_dim location_dim__db_location_dim_location_dim";
     compareQueries(expected3, actual3);
@@ -239,7 +240,7 @@ public class TestColumnarSQLRewriter {
     String query4 = "select count(distinct location_dim.id) from  db.location_dim location_dim "
       + "left outer join db.item_dim item_dim on location_dim.id = item_dim.id "
       + "right outer join time_dim time_dim on location_dim.id = time_dim.id ";
-    String actual4 = qtest.rewrite(query4, conf);
+    String actual4 = qtest.rewrite(query4, conf, hconf);
     String expected4 = "select count( distinct ( location_dim__db_location_dim_location_dim . id )) "
       + "from db.location_dim location_dim__db_location_dim_location_dim  left outer join db.item_dim "
       + "item_dim__db_item_dim_item_dim on (( location_dim__db_location_dim_location_dim . id ) = "
@@ -269,9 +270,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String rwq = qtest.rewrite(query, conf);
+    String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "inner join time_dim time_dim___time_dim on (( sales_fact___fact . time_key ) = "
       + "( time_dim___time_dim . time_key ))  inner join location_dim location_dim___location_dim on "
       + "((( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) "
@@ -301,9 +302,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String rwq = qtest.rewrite(query, conf);
+    String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "[(( location_dim___location_dim . location_name ) =  "
       + "'test123' ), , , ( time_dim___time_dim . time_key ) between  '2013-01-01'  and  '2013-01-31' "
       + ", , ( time_dim___time_dim . time_key ) between  '2013-01-01'  and  '2013-01-31' ]";
@@ -334,9 +335,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String rwq = qtest.rewrite(query, conf);
+    String rwq = qtest.rewrite(query, conf, hconf);
     String actual = qtest.aggColumn.toString();
     String expected = "[sum(( sales_fact___fact . dollars_sold )) as sum_sales_fact___fact_dollars_sold, "
       + "sum(( sales_fact___fact . units_sold )) as sum_sales_fact___fact_units_sold, "
@@ -368,9 +369,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String rwq = qtest.rewrite(query, conf);
+    String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "sales_fact___fact.time_key,sales_fact___fact.location_key,sales_fact___fact.item_key,";
     String actual = qtest.factKeys.toString();
     compareQueries(expected, actual);
@@ -398,9 +399,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' " + "and item_dim.item_name = 'item_1' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String rwq = qtest.rewrite(query, conf);
+    String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "sales_fact___fact.time_key in  (  select time_dim .time_key from time_dim where "
       + "( time_dim. time_key ) between  '2013-01-01'  and  '2013-01-31'  ) and sales_fact___fact.location_key in  "
       + "(  select location_dim .location_key from location_dim "
@@ -435,9 +436,9 @@ public class TestColumnarSQLRewriter {
         + "and item_dim.item_name = 'item_1' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by dollars_sold  ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
 
     String expected = "select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
       + "date(( time_dim___time_dim . day )), ( item_dim___item_dim . item_key ),  case  when "
@@ -506,9 +507,9 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-03-01' and '2013-03-05' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order by dollars_sold ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
       + "( time_dim___time_dim . day ),  case  when (sum(sum_sales_fact___fact_dollars_sold) =  0 ) then  "
       + "0.0  else sum(sum_sales_fact___fact_dollars_sold) end  dollars_sold  "
@@ -577,9 +578,9 @@ public class TestColumnarSQLRewriter {
       + "location_dim join time_dim time_dim on location_dim.time_id = time_dim.id "
       + "WHERE ( time_dim . full_date ) between  '2013-01-01 00:00:00'  and  '2013-01-04 00:00:00'  LIMIT 10 ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select  distinct ( location_dim___location_dim . id ) "
       + "from location_dim location_dim___location_dim  "
       + "inner join time_dim time_dim___time_dim on "
@@ -597,9 +598,9 @@ public class TestColumnarSQLRewriter {
       + "location_dim join time_dim time_dim on location_dim.time_id = time_dim.id "
       + "WHERE ( time_dim . full_date ) between  '2013-01-01 00:00:00'  and  '2013-01-04 00:00:00'  LIMIT 10 ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select sum(count_location_dim___location_dim_name) from  "
       + "(select location_dim___location_dim.time_id,count(( location_dim___location_dim . name )) as "
       + "count_location_dim___location_dim_name from location_dim location_dim___location_dim where "
@@ -622,9 +623,9 @@ public class TestColumnarSQLRewriter {
       + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
       + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order by dollars_sold desc ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ), ( time_dim___time_dim . day_of_week ), "
       + "( time_dim___time_dim . day ),  case  when (sum(sum_sales_fact__db_sales_fact_fact_dollars_sold) =  0 ) "
       + "then  0.0  else sum(sum_sales_fact__db_sales_fact_fact_dollars_sold) end  dollars_sold  from  "
@@ -660,9 +661,9 @@ public class TestColumnarSQLRewriter {
       + "WHERE ((dim1 . date) = '2014-11-25 00:00:00') "
       + "GROUP BY (dim1 . date),  (dim2 . name), (dim3 . name) , (dim4 . name) ";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) date , sum(sum_fact___f_msr1) msr1 , "
       + "( dim2___dim2 . name ) dim2_name , "
       + "( dim3___dim3 . name ) dim3_name , ( dim4___dim4 . name ) dim4_name "
@@ -690,9 +691,9 @@ public class TestColumnarSQLRewriter {
       + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not null "
       + "GROUP BY (dim1 . date),  (dim2 . name)";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) date , sum(sum_fact___f_msr1) msr1 , "
       + "( dim2___dim2 . name ) dim2_name  from  "
       + "(select fact___f.dim2_id, fact___f.dim1_id, fact___f.m4, fact___f.m3, fact___f.m2,sum(( fact___f . msr1 )) as "
@@ -719,9 +720,9 @@ public class TestColumnarSQLRewriter {
       + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not null "
       + "GROUP BY (dim1 . date),  (dim2 . name) ORDER BY dim1_date";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) dim1_date , sum(sum_fact___f_msr1) msr1 , "
       + "( dim2___dim2 . name ) dim2_name  from  (select fact___f.dim2_id, "
       + "fact___f.dim1_id, fact___f.m4, fact___f.m3, fact___f.m2,sum(( fact___f . msr1 )) "
@@ -749,9 +750,9 @@ public class TestColumnarSQLRewriter {
       + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not null "
       + "GROUP BY (dim1 . date),  (dim2 . name) ORDER BY dim1_date";
 
-    SessionState.start(conf);
+    SessionState.start(hconf);
 
-    String actual = qtest.rewrite(query, conf);
+    String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) dim1_date , sum(sum_fact___f_msr1) msr1 , "
       + "( dim2___dim2 . name ) dim2_name  from  "
       + "(select fact___f.dim2_id, fact___f.dim1_id, fact___f.dim3_id, fact___f.m4, fact___f.m2,sum((fact___f.msr1)) "
@@ -785,37 +786,34 @@ public class TestColumnarSQLRewriter {
     serdeUrls[0] = new URL("file:" + testJarFile.getAbsolutePath());
     serdeUrls[1] = new URL("file:" + serdeJarFile.getAbsolutePath());
 
-    URLClassLoader createTableClassLoader = new URLClassLoader(serdeUrls, conf.getClassLoader());
-    conf.setClassLoader(createTableClassLoader);
-    SessionState.start(conf);
+    URLClassLoader createTableClassLoader = new URLClassLoader(serdeUrls, hconf.getClassLoader());
+    hconf.setClassLoader(createTableClassLoader);
+    SessionState.start(hconf);
 
     // Create test table
     Database database = new Database();
     database.setName("mydb");
 
-    Hive.get(conf).createDatabase(database);
+    Hive.get(hconf).createDatabase(database);
     SessionState.get().setCurrentDatabase("mydb");
-    createTable(conf, "mydb", "mytable", "testDB", "testTable_1");
-    createTable(conf, "mydb", "mytable_2", "testDB", "testTable_2");
-    createTable(conf, "default", "mytable_3", "testDB", "testTable_3");
+    createTable(hconf, "mydb", "mytable", "testDB", "testTable_1");
+    createTable(hconf, "mydb", "mytable_2", "testDB", "testTable_2");
+    createTable(hconf, "default", "mytable_3", "testDB", "testTable_3");
 
     String query = "SELECT * FROM mydb.mytable t1 JOIN mytable_2 t2 ON t1.t2id = t2.id "
       + " left outer join default.mytable_3 t3 on t2.t3id = t3.id " + "WHERE A = 100";
 
-    // Test fails without setting this class loader as now metastore lookup is done using queryConf
-    queryConf.setClassLoader(createTableClassLoader);
-
     ColumnarSQLRewriter rewriter = new ColumnarSQLRewriter();
     rewriter.init(conf);
-    rewriter.ast = HQLParser.parseHQL(query);
+    rewriter.ast = HQLParser.parseHQL(query, hconf);
     rewriter.query = query;
-    rewriter.analyzeInternal();
+    rewriter.analyzeInternal(conf, hconf);
 
     String joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
     System.out.println(joinTreeBeforeRewrite);
 
     // Rewrite
-    rewriter.replaceWithUnderlyingStorage(queryConf, rewriter.fromAST);
+    rewriter.replaceWithUnderlyingStorage(hconf, rewriter.fromAST);
     String joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
     System.out.println("joinTreeAfterRewrite:" + joinTreeAfterRewrite);
 
@@ -833,17 +831,17 @@ public class TestColumnarSQLRewriter {
       && joinTreeAfterRewrite.contains("testtable_3"));
 
     // Rewrite one more query where table and db name is not set
-    createTable(conf, "mydb", "mytable_4", null, null);
+    createTable(hconf, "mydb", "mytable_4", null, null);
     String query2 = "SELECT * FROM mydb.mytable_4 WHERE a = 100";
-    rewriter.ast = HQLParser.parseHQL(query2);
+    rewriter.ast = HQLParser.parseHQL(query2, hconf);
     rewriter.query = query2;
-    rewriter.analyzeInternal();
+    rewriter.analyzeInternal(conf, hconf);
 
     joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
     System.out.println(joinTreeBeforeRewrite);
 
     // Rewrite
-    rewriter.replaceWithUnderlyingStorage(queryConf, rewriter.fromAST);
+    rewriter.replaceWithUnderlyingStorage(hconf, rewriter.fromAST);
     joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
     System.out.println(joinTreeAfterRewrite);
 
@@ -855,14 +853,14 @@ public class TestColumnarSQLRewriter {
     database = new Database();
     database.setName("examples");
     Hive.get().createDatabase(database);
-    createTable(conf, "examples", "mytable", "default", null);
+    createTable(hconf, "examples", "mytable", "default", null);
 
     String defaultQuery = "SELECT * FROM examples.mytable t1 WHERE A = 100";
-    rewriter.ast = HQLParser.parseHQL(defaultQuery);
+    rewriter.ast = HQLParser.parseHQL(defaultQuery, hconf);
     rewriter.query = defaultQuery;
-    rewriter.analyzeInternal();
+    rewriter.analyzeInternal(conf, hconf);
     joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
-    rewriter.replaceWithUnderlyingStorage(queryConf, rewriter.fromAST);
+    rewriter.replaceWithUnderlyingStorage(hconf, rewriter.fromAST);
     joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
     assertTrue(joinTreeBeforeRewrite.contains("examples"), joinTreeBeforeRewrite);
     assertFalse(joinTreeAfterRewrite.contains("examples"), joinTreeAfterRewrite);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
index 912e201..786fd34 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
@@ -70,6 +70,7 @@ public class TestJdbcDriver {
   /** The base conf. */
   Configuration baseConf;
 
+  HiveConf hConf;
   /** The driver. */
   JDBCDriver driver;
 
@@ -88,6 +89,7 @@ public class TestJdbcDriver {
     baseConf.set(JDBCDriverConfConstants.JDBC_USER, "SA");
     baseConf.set(JDBCDriverConfConstants.JDBC_PASSWORD, "");
     baseConf.set(JDBCDriverConfConstants.JDBC_EXPLAIN_KEYWORD_PARAM, "explain plan for ");
+    hConf = new HiveConf(baseConf, this.getClass());
 
     driver = new JDBCDriver();
     driver.configure(baseConf);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
----------------------------------------------------------------------
diff --git a/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java b/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
index 0d441c8..5958c41 100644
--- a/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
+++ b/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
@@ -226,7 +226,7 @@ public abstract class TestAbstractFileFormatter {
       Assert.fail(e.getMessage());
     }
     QueryContext ctx = QueryContext.createContextWithSingleDriver("test writer query", "testuser", new LensConf(),
-        conf, mockDriver, null);
+        conf, mockDriver, null, false);
 
     ctx.setSelectedDriver(mockDriver);
     formatter = createFormatter();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 0f68cb9..3101ed6 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -37,6 +37,7 @@ import org.apache.lens.server.api.util.LensUtil;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 
 import lombok.Getter;
 import lombok.Setter;
@@ -63,6 +64,11 @@ public abstract class AbstractQueryContext implements Serializable {
   protected transient Configuration conf;
 
   /**
+   * The hive Conf.
+   */
+  protected transient HiveConf hiveConf;
+
+  /**
    * The query conf.
    */
   @Getter
@@ -107,13 +113,13 @@ public abstract class AbstractQueryContext implements Serializable {
   private boolean olapQuery = false;
 
   protected AbstractQueryContext(final String query, final String user, final LensConf qconf, final Configuration conf,
-    final Collection<LensDriver> drivers) {
+    final Collection<LensDriver> drivers, boolean mergeDriverConf) {
     if (conf.getBoolean(LensConfConstants.ENABLE_QUERY_METRICS, LensConfConstants.DEFAULT_ENABLE_QUERY_METRICS)) {
       UUID metricId = UUID.randomUUID();
       conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, metricId.toString());
       LOG.info("Generated metric id: " + metricId + " for query: " + query);
     }
-    driverContext = new DriverSelectorQueryContext(query, conf, drivers);
+    driverContext = new DriverSelectorQueryContext(query, conf, drivers, mergeDriverConf);
     userQuery = query;
     this.lensConf = qconf;
     this.conf = conf;
@@ -321,6 +327,26 @@ public abstract class AbstractQueryContext implements Serializable {
     return driverContext.driverQueryContextMap.get(driver).getDriverQueryRewriteError();
   }
 
+  /**
+   * Gets the HiveConf corresponding to the query conf.
+   *
+   * Should be called judiciously, because constructing a HiveConf from the conf object is costly.
+   * @return the hive conf
+   */
+  public synchronized HiveConf getHiveConf() {
+    if (hiveConf == null) {
+      hiveConf = new HiveConf(this.conf, this.getClass());
+      hiveConf.setClassLoader(this.conf.getClassLoader());
+    }
+    return hiveConf;
+  }
+
+  /**
+   * Sets the final rewritten query for the given driver.
+   *
+   * @param driver the driver
+   * @param rewrittenQuery the final rewritten query
+   */
   public void setFinalDriverQuery(LensDriver driver, String rewrittenQuery) {
     driverContext.driverQueryContextMap.get(driver).setFinalDriverQuery(rewrittenQuery);
   }
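
The new getHiveConf() is a plain lazy-initialization cache. Stripped of the Lens specifics, the pattern is just this (a sketch, not the actual class):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

public class LazyHiveConfSketch {
  private final Configuration conf;
  private transient HiveConf hiveConf;

  public LazyHiveConfSketch(Configuration conf) {
    this.conf = conf;
  }

  // The costly HiveConf is constructed at most once per context and then
  // reused by every caller that needs one (parsing, metastore lookups, ...).
  public synchronized HiveConf getHiveConf() {
    if (hiveConf == null) {
      hiveConf = new HiveConf(conf, getClass());
      hiveConf.setClassLoader(conf.getClassLoader());
    }
    return hiveConf;
  }
}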

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
index 088076e..b53c4b9 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
@@ -59,10 +59,19 @@ public class DriverSelectorQueryContext {
   DriverQueryContext>();
 
   public DriverSelectorQueryContext(final String userQuery, final Configuration queryConf,
-      final Collection<LensDriver> drivers) {
+    final Collection<LensDriver> drivers) {
+    this(userQuery, queryConf, drivers, true);
+  }
+
+  public DriverSelectorQueryContext(final String userQuery, final Configuration queryConf,
+    final Collection<LensDriver> drivers, boolean mergeConf) {
     for (LensDriver driver : drivers) {
       DriverQueryContext ctx = new DriverQueryContext(driver);
-      ctx.setDriverSpecificConf(mergeConf(driver, queryConf));
+      if (mergeConf) {
+        ctx.setDriverSpecificConf(mergeConf(driver, queryConf));
+      } else {
+        ctx.setDriverSpecificConf(queryConf);
+      }
       String metricId = ctx.driverSpecificConf.get(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
       if (!StringUtils.isBlank(metricId)) {
         ctx.driverSpecificConf.set(LensConfConstants.QUERY_METRIC_DRIVER_STACK_NAME,
@@ -218,7 +227,7 @@ public class DriverSelectorQueryContext {
 
     if (driverQueryCtxs.get(getSelectedDriver()).getDriverQueryPlanGenerationError() != null) {
       throw new LensException("Driver Query Plan of the selected driver is null",
-          driverQueryCtxs.get(getSelectedDriver()).getDriverQueryPlanGenerationError());
+        driverQueryCtxs.get(getSelectedDriver()).getDriverQueryPlanGenerationError());
     }
     return driverQueryCtxs.get(getSelectedDriver()).getDriverQueryPlan();
   }
@@ -244,7 +253,7 @@ public class DriverSelectorQueryContext {
 
     if (driverQueryCtxs.get(getSelectedDriver()).getDriverQueryCostEstimateError() != null) {
       throw new LensException("Driver Query Cost of the selected driver is null",
-          driverQueryCtxs.get(getSelectedDriver()).getDriverQueryCostEstimateError());
+        driverQueryCtxs.get(getSelectedDriver()).getDriverQueryCostEstimateError());
     }
     return driverQueryCtxs.get(getSelectedDriver()).getDriverCost();
   }
@@ -284,17 +293,17 @@ public class DriverSelectorQueryContext {
 
   public DriverQueryPlan getDriverQueryPlan(LensDriver driver) {
     return driverQueryContextMap.get(driver) != null
-        ? driverQueryContextMap.get(driver).getDriverQueryPlan() : null;
+      ? driverQueryContextMap.get(driver).getDriverQueryPlan() : null;
   }
 
   public Configuration getDriverConf(LensDriver driver) {
     return driverQueryContextMap.get(driver) != null
-        ? driverQueryContextMap.get(driver).getDriverSpecificConf() : null;
+      ? driverQueryContextMap.get(driver).getDriverSpecificConf() : null;
   }
 
   public String getDriverQuery(LensDriver driver) {
     return driverQueryContextMap.get(driver) != null
-        ? driverQueryContextMap.get(driver).getQuery() : null;
+      ? driverQueryContextMap.get(driver).getQuery() : null;
   }
 
   public String getFinalDriverQuery(LensDriver driver) {
@@ -304,7 +313,7 @@ public class DriverSelectorQueryContext {
 
   public QueryCost getDriverQueryCost(LensDriver driver) {
     return driverQueryContextMap.get(driver) != null
-        ? driverQueryContextMap.get(driver).getDriverCost() : null;
+      ? driverQueryContextMap.get(driver).getDriverCost() : null;
   }
 
   public void setDriverQueryPlan(LensDriver driver, DriverQueryPlan qp) {
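
The effect of the new mergeConf flag can be sketched directly; with an empty driver list the constructor loop is a no-op, so the snippet below is side-effect free (real callers pass actual drivers):

import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.lens.server.api.driver.LensDriver;
import org.apache.lens.server.api.query.DriverSelectorQueryContext;

public class MergeConfFlagSketch {
  public static void main(String[] args) {
    Configuration queryConf = new Configuration();
    // true (also what the two-argument constructor passes): each driver sees
    // queryConf merged over its own driver conf, as before this change.
    new DriverSelectorQueryContext("select 1", queryConf,
      Collections.<LensDriver>emptyList(), true);
    // false: queryConf is used as-is and the per-driver merge is skipped;
    // internal queries (EXPLAIN, prepared and restored contexts) pass false.
    new DriverSelectorQueryContext("select 1", queryConf,
      Collections.<LensDriver>emptyList(), false);
  }
}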

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/ExplainQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/ExplainQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/ExplainQueryContext.java
index 0a0cfbd..da44955 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/ExplainQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/ExplainQueryContext.java
@@ -39,6 +39,6 @@ public class ExplainQueryContext extends AbstractQueryContext {
    */
   public ExplainQueryContext(String query, final String user, LensConf conf, Configuration qconf,
       Collection<LensDriver> drivers) {
-    super(query, user, conf, qconf, drivers);
+    super(query, user, conf, qconf, drivers, true);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
index 9dce099..ffda81b 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
@@ -186,7 +186,8 @@ public class FinishedLensQuery {
   }
 
   public QueryContext toQueryContext(Configuration conf, Collection<LensDriver> drivers) throws LensException {
-    QueryContext qctx = new QueryContext(userQuery, submitter, new LensConf(), conf, drivers, null, submissionTime);
+    QueryContext qctx = new QueryContext(userQuery, submitter, new LensConf(), conf, drivers, null, submissionTime,
+      false);
     qctx.setQueryHandle(QueryHandle.fromString(handle));
     qctx.setEndTime(getEndTime());
     qctx.setStatusSkippingTransitionTest(new QueryStatus(0.0, QueryStatus.Status.valueOf(getStatus()),

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
index 49d01d2..c49567a 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
@@ -93,7 +93,7 @@ public class PreparedQueryContext extends AbstractQueryContext implements Delaye
    */
   public PreparedQueryContext(String query, String user, Configuration conf, LensConf qconf, Collection<LensDriver>
     drivers) {
-    super(query, user, qconf, conf, drivers);
+    super(query, user, qconf, conf, drivers, false);
     this.preparedTime = new Date();
     this.preparedUser = user;
     this.prepareHandle = new QueryPrepareHandle(UUID.randomUUID());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
index ac34760..202ee1e 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
@@ -166,7 +166,7 @@ public class QueryContext extends AbstractQueryContext implements Comparable<Que
    * @param conf  the conf
    */
   public QueryContext(String query, String user, LensConf qconf, Configuration conf, Collection<LensDriver> drivers) {
-    this(query, user, qconf, conf, drivers, null);
+    this(query, user, qconf, conf, drivers, null, true);
   }
 
   /**
@@ -179,7 +179,7 @@ public class QueryContext extends AbstractQueryContext implements Comparable<Que
    */
   public QueryContext(PreparedQueryContext prepared, String user, LensConf qconf, Configuration conf) {
     this(prepared.getUserQuery(), user, qconf, mergeConf(prepared.getConf(), conf), prepared.getDriverContext()
-      .getDriverQueryContextMap().keySet(), prepared.getDriverContext().getSelectedDriver());
+      .getDriverQueryContextMap().keySet(), prepared.getDriverContext().getSelectedDriver(), true);
     setDriverContext(prepared.getDriverContext());
     setSelectedDriverQuery(prepared.getSelectedDriverQuery());
   }
@@ -195,8 +195,8 @@ public class QueryContext extends AbstractQueryContext implements Comparable<Que
    * @param selectedDriver SelectedDriver
    */
   private QueryContext(String userQuery, String user, LensConf qconf, Configuration conf,
-      Collection<LensDriver> drivers, LensDriver selectedDriver) {
-    this(userQuery, user, qconf, conf, drivers, selectedDriver, System.currentTimeMillis());
+      Collection<LensDriver> drivers, LensDriver selectedDriver, boolean mergeDriverConf) {
+    this(userQuery, user, qconf, conf, drivers, selectedDriver, System.currentTimeMillis(), mergeDriverConf);
   }
   /**
    * Instantiates a new query context.
@@ -210,8 +210,8 @@ public class QueryContext extends AbstractQueryContext implements Comparable<Que
    * @param submissionTime the submission time
    */
   QueryContext(String userQuery, String user, LensConf qconf, Configuration conf,
-    Collection<LensDriver> drivers, LensDriver selectedDriver, long submissionTime) {
-    super(userQuery, user, qconf, conf, drivers);
+    Collection<LensDriver> drivers, LensDriver selectedDriver, long submissionTime, boolean mergeDriverConf) {
+    super(userQuery, user, qconf, conf, drivers, mergeDriverConf);
     this.submissionTime = submissionTime;
     this.queryHandle = new QueryHandle(UUID.randomUUID());
     this.status = new QueryStatus(0.0f, Status.NEW, "Query just got created", false, null, null);
@@ -243,8 +243,8 @@ public class QueryContext extends AbstractQueryContext implements Comparable<Que
    * @return QueryContext object
    */
   public static QueryContext createContextWithSingleDriver(String query, String user, LensConf qconf,
-      Configuration conf, LensDriver driver, String lensSessionPublicId) {
-    QueryContext ctx = new QueryContext(query, user, qconf, conf, Lists.newArrayList(driver), driver);
+      Configuration conf, LensDriver driver, String lensSessionPublicId, boolean mergeDriverConf) {
+    QueryContext ctx = new QueryContext(query, user, qconf, conf, Lists.newArrayList(driver), driver, mergeDriverConf);
     ctx.setLensSessionIdentifier(lensSessionPublicId);
     return ctx;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryRewriter.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryRewriter.java
index 3b38813..20b6e7f 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryRewriter.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryRewriter.java
@@ -22,6 +22,7 @@ package org.apache.lens.server.api.query;
 import org.apache.lens.api.LensException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 
 /**
  * The Interface QueryRewriter.
@@ -30,13 +31,15 @@ public interface QueryRewriter {
 
   /**
    * Rewrite.
-   *
-   * @param queryConf the query configuration
    * @param query     the query
+   * @param queryConf the query configuration
+   * @param metastoreConf the metastore configuration. If a rewriter needs to access the metastore, this
+   *  configuration needs to be passed
+   *
    * @return the string
    * @throws LensException the lens exception
    */
-  String rewrite(String query, Configuration queryConf) throws LensException;
+  String rewrite(String query, Configuration queryConf, HiveConf metastoreConf) throws LensException;
 
   /**
    * Set conf for the rewriter
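
Under the new contract, a rewriter that never touches the metastore simply ignores the extra argument, exactly as the pass-through rewriter inside JDBCDriver does. A sketch of such an implementation; the init(Configuration) override matches how the tests initialize rewriters, but treat the exact method name as an assumption since the rest of the interface is cut off above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.lens.api.LensException;
import org.apache.lens.server.api.query.QueryRewriter;

public class PassThroughRewriter implements QueryRewriter {
  @Override
  public String rewrite(String query, Configuration queryConf, HiveConf metastoreConf)
    throws LensException {
    // No metastore access is needed, so metastoreConf is ignored.
    return query;
  }

  @Override
  public void init(Configuration rewriteConf) {
    // Nothing to configure for a pass-through rewriter.
  }
}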

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
index 078988b..5a1ab66 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
@@ -31,6 +31,6 @@ public class MockQueryContext extends AbstractQueryContext {
 
   public MockQueryContext(final String query, final LensConf qconf,
     final Configuration conf, final Collection<LensDriver> drivers) {
-    super(query, "testuser", qconf, conf, drivers);
+    super(query, "testuser", qconf, conf, drivers, false);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index f9f6645..e3bfed8 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -1869,7 +1869,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     qconf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     QueryContext addQuery = QueryContext.createContextWithSingleDriver(command,
       getSession(sessionHandle).getLoggedInUser(), qconf, getLensConf(
-        sessionHandle, qconf), driver, sessionHandle.getPublicId().toString());
+        sessionHandle, qconf), driver, sessionHandle.getPublicId().toString(), true);
     return addQuery;
   }
 


[2/2] incubator-lens git commit: LENS-408 : Creating HiveConf multiple times unnecessarily should be avoided (amareshwari)

Posted by am...@apache.org.
LENS-408 : Creating HiveConf multiple times unnecessarily should be avoided (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/51c52eb3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/51c52eb3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/51c52eb3

Branch: refs/heads/master
Commit: 51c52eb3e8b51db35fa4c233c797f68c50b24479
Parents: 8578c8f
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Mon Mar 16 12:54:22 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Mar 16 12:54:22 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/AggregateResolver.java      |   4 +-
 .../lens/cube/parse/CandidateTableResolver.java |   8 +-
 .../lens/cube/parse/CubeQueryContext.java       |  44 ++------
 .../lens/cube/parse/CubeQueryRewriter.java      |  32 ++++--
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |  13 ++-
 .../cube/parse/DenormalizationResolver.java     |   4 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |   2 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |  19 +++-
 .../apache/lens/cube/parse/JoinResolver.java    |  15 +--
 .../lens/cube/parse/TimerangeResolver.java      |   2 +-
 .../apache/lens/driver/cube/RewriteUtil.java    |  19 ++--
 .../apache/lens/cube/parse/CubeTestSetup.java   |   2 +-
 .../lens/cube/parse/TestCubeRewriter.java       |  53 +++------
 .../apache/lens/cube/parse/TestHQLParser.java   |  65 +++++------
 .../lens/cube/parse/TestJoinResolver.java       |  30 ++---
 .../lens/cube/parse/TestQueryRewrite.java       |   4 +-
 .../lens/cube/parse/TestTimeRangeExtractor.java |   3 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  59 +++-------
 .../apache/lens/driver/cube/TestRewriting.java  |  54 ++++-----
 .../driver/hive/EmbeddedThriftConnection.java   |   6 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  36 +++---
 .../driver/hive/RemoteThriftConnection.java     |   9 +-
 .../lens/driver/hive/ThriftConnection.java      |  11 +-
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |  45 ++++----
 .../org/apache/lens/driver/jdbc/JDBCDriver.java |  14 +--
 .../driver/jdbc/TestColumnarSQLRewriter.java    | 110 +++++++++----------
 .../apache/lens/driver/jdbc/TestJdbcDriver.java |   2 +
 .../lib/query/TestAbstractFileFormatter.java    |   2 +-
 .../server/api/query/AbstractQueryContext.java  |  30 ++++-
 .../api/query/DriverSelectorQueryContext.java   |  25 +++--
 .../server/api/query/ExplainQueryContext.java   |   2 +-
 .../server/api/query/FinishedLensQuery.java     |   3 +-
 .../server/api/query/PreparedQueryContext.java  |   2 +-
 .../lens/server/api/query/QueryContext.java     |  16 +--
 .../lens/server/api/query/QueryRewriter.java    |   9 +-
 .../lens/server/api/query/MockQueryContext.java |   2 +-
 .../server/query/QueryExecutionServiceImpl.java |   2 +-
 37 files changed, 389 insertions(+), 369 deletions(-)
----------------------------------------------------------------------
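
The recurring pattern across these files: constructing a HiveConf re-reads hive-default/hive-site resources and is comparatively expensive, so it is built once (for metastore and HiveServer access) and reused, while per-query settings travel in a plain Configuration. A hedged, stand-alone sketch of the idea, where SomeDriver is a hypothetical stand-in and not a class in this commit:

  import java.util.Map;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.conf.HiveConf;

  public class SomeDriver {
    private Configuration driverConf; // merged with each query's conf
    private HiveConf hiveConf;        // reused for metastore/HS2 access

    public void configure(Configuration conf) {
      this.driverConf = new Configuration(conf);            // cheap copy
      this.hiveConf = new HiveConf(conf, SomeDriver.class); // built once
    }

    public Configuration mergedConfFor(Configuration queryConf) {
      Configuration merged = new Configuration(driverConf);
      for (Map.Entry<String, String> e : queryConf) { // per-query overrides
        merged.set(e.getKey(), e.getValue());
      }
      return merged; // no new HiveConf on the per-query path
    }
  }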


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index cd40a75..a25fae6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -58,7 +58,7 @@ class AggregateResolver implements ContextRewriter {
     }
 
     boolean nonDefaultAggregates = false;
-    boolean aggregateResolverDisabled = cubeql.getHiveConf().getBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER,
+    boolean aggregateResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER,
       CubeQueryConfUtil.DEFAULT_DISABLE_AGGREGATE_RESOLVER);
     // Check if the query contains measures
     // 1. not inside default aggregate expressions
@@ -95,7 +95,7 @@ class AggregateResolver implements ContextRewriter {
 
     resolveClause(cubeql, cubeql.getHavingAST());
 
-    Configuration distConf = cubeql.getHiveConf();
+    Configuration distConf = cubeql.getConf();
     boolean isDimOnlyDistinctEnabled = distConf.getBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT,
       CubeQueryConfUtil.DEFAULT_ATTR_FIELDS_ADD_DISTINCT);
     if (isDimOnlyDistinctEnabled) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 871ce65..9ee1667 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -54,7 +54,7 @@ class CandidateTableResolver implements ContextRewriter {
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
     qlEnabledMultiTableSelect =
-      cubeql.getHiveConf().getBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT,
+      cubeql.getConf().getBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT,
         CubeQueryConfUtil.DEFAULT_MULTI_TABLE_SELECT);
     if (checkForQueriedColumns) {
       LOG.debug("Dump queried columns:" + cubeql.getTblAliasToColumns());
@@ -200,7 +200,7 @@ class CandidateTableResolver implements ContextRewriter {
 
   private void resolveCandidateFactTables(CubeQueryContext cubeql) throws SemanticException {
     if (cubeql.getCube() != null) {
-      String str = cubeql.getHiveConf().get(CubeQueryConfUtil.getValidFactTablesKey(cubeql.getCube().getName()));
+      String str = cubeql.getConf().get(CubeQueryConfUtil.getValidFactTablesKey(cubeql.getCube().getName()));
       List<String> validFactTables =
         StringUtils.isBlank(str) ? null : Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
       Set<String> queriedDimAttrs = cubeql.getQueriedDimAttrs();
@@ -229,7 +229,7 @@ class CandidateTableResolver implements ContextRewriter {
         for (String col : queriedDimAttrs) {
           if (!cfact.getColumns().contains(col.toLowerCase())) {
            // check if it is available as a reference; if not, remove the candidate
-            if (!cubeql.getDenormCtx().addRefUsage(cfact, col, cubeql.getCube().getName())) {
+            if (!cubeql.getDeNormCtx().addRefUsage(cfact, col, cubeql.getCube().getName())) {
               LOG.info("Not considering fact table:" + cfact + " as column " + col + " is not available");
               cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.columnNotFound(col));
               toRemove = true;
@@ -510,7 +510,7 @@ class CandidateTableResolver implements ContextRewriter {
               if (!cdim.getColumns().contains(col.toLowerCase())) {
                // check if it is available as a reference; if not, remove the
                // candidate
-                if (!cubeql.getDenormCtx().addRefUsage(cdim, col, dim.getName())) {
+                if (!cubeql.getDeNormCtx().addRefUsage(cdim, col, dim.getName())) {
                   LOG.info("Not considering dimtable:" + cdim + " as column " + col + " is not available");
                   cubeql.addDimPruningMsgs(dim, cdim.getTable(), CandidateTablePruneCause.columnNotFound(col));
                   i.remove();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 96e6f34..df5a212 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -31,6 +31,7 @@ import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCo
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -52,7 +53,8 @@ public class CubeQueryContext {
   @Getter
   private final QB qb;
   private String clauseName = null;
-  private final HiveConf conf;
+  @Getter
+  private final Configuration conf;
 
   @Getter
   private final List<TimeRange> timeRanges;
@@ -79,7 +81,6 @@ public class CubeQueryContext {
   private final Set<Set<CandidateFact>> candidateFactSets = new HashSet<Set<CandidateFact>>();
 
   // would be added through join chains and de-normalized resolver
-  @Getter
   protected Map<Dimension, OptionalDimCtx> optionalDimensions = new HashMap<Dimension, OptionalDimCtx>();
 
   // Alias to table object mapping of tables accessed in this query
@@ -128,8 +129,10 @@ public class CubeQueryContext {
   @Getter
   private CubeMetastoreClient metastoreClient;
   @Getter
+  @Setter
   private JoinResolver.AutoJoinContext autoJoinCtx;
   @Getter
+  @Setter
   private DenormalizationResolver.DenormalizationContext deNormCtx;
   @Getter
   private PruneCauses<CubeFactTable> factPruningMsgs =
@@ -138,14 +141,15 @@ public class CubeQueryContext {
   private Map<Dimension, PruneCauses<CubeDimensionTable>> dimPruningMsgs =
     new HashMap<Dimension, PruneCauses<CubeDimensionTable>>();
 
-  public CubeQueryContext(ASTNode ast, QB qb, HiveConf conf) throws SemanticException {
+  public CubeQueryContext(ASTNode ast, QB qb, Configuration queryConf, HiveConf metastoreConf)
+    throws SemanticException {
     this.ast = ast;
     this.qb = qb;
-    this.conf = conf;
+    this.conf = queryConf;
     this.clauseName = getClause();
     this.timeRanges = new ArrayList<TimeRange>();
     try {
-      metastoreClient = CubeMetastoreClient.getInstance(conf);
+      metastoreClient = CubeMetastoreClient.getInstance(metastoreConf);
     } catch (HiveException e) {
       throw new SemanticException(e);
     }
@@ -359,10 +363,6 @@ public class CubeQueryContext {
     return clauseName;
   }
 
-  public QB getQB() {
-    return qb;
-  }
-
   public Set<CandidateFact> getCandidateFactTables() {
     return candidateFacts;
   }
@@ -956,30 +956,6 @@ public class CubeQueryContext {
     }
   }
 
-  public List<TimeRange> getTimeRanges() {
-    return timeRanges;
-  }
-
-  public HiveConf getHiveConf() {
-    return conf;
-  }
-
-  public void setAutoJoinCtx(JoinResolver.AutoJoinContext autoJoinCtx) {
-    this.autoJoinCtx = autoJoinCtx;
-  }
-
-  public JoinResolver.AutoJoinContext getAutoJoinCtx() {
-    return autoJoinCtx;
-  }
-
-  public void setDenormCtx(DenormalizationResolver.DenormalizationContext deNormCtx) {
-    this.deNormCtx = deNormCtx;
-  }
-
-  public DenormalizationResolver.DenormalizationContext getDenormCtx() {
-    return this.deNormCtx;
-  }
-
   public Set<Dimension> getOptionalDimensions() {
     return optionalDimensions.keySet();
   }
@@ -996,7 +972,7 @@ public class CubeQueryContext {
   }
 
   public boolean shouldReplaceTimeDimWithPart() {
-    return getHiveConf().getBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL,
+    return getConf().getBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL,
       CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL);
   }
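
A side effect worth noting: the hand-written accessors removed above are replaced by Lombok annotations, and Lombok derives accessor names from the field name. That is why call sites elsewhere in this commit change from getDenormCtx()/getQB() to getDeNormCtx()/getQb(). A hedged illustration with stand-in field types:

  import lombok.Getter;
  import lombok.Setter;

  class Example {
    @Getter @Setter
    private String deNormCtx; // Lombok generates getDeNormCtx()/setDeNormCtx(...)
    @Getter
    private String qb;        // Lombok generates getQb()
  }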
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 3e66157..1833cde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -30,21 +30,24 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import lombok.extern.slf4j.Slf4j;
+
 /**
  * Rewrites given cube query into simple storage table HQL.
  */
+@Slf4j
 public class CubeQueryRewriter {
   private final Configuration conf;
   private final List<ContextRewriter> rewriters = new ArrayList<ContextRewriter>();
   private final HiveConf hconf;
-  private Context ctx = null;
+  private Context qlCtx = null;
   private boolean lightFactFirst;
 
-  public CubeQueryRewriter(Configuration conf) {
+  public CubeQueryRewriter(Configuration conf, HiveConf hconf) {
     this.conf = conf;
-    hconf = new HiveConf(conf, HiveConf.class);
+    this.hconf = hconf;
     try {
-      ctx = new Context(hconf);
+      qlCtx = new Context(conf);
     } catch (IOException e) {
       // IOException is ignorable
     }
@@ -162,8 +165,8 @@ public class CubeQueryRewriter {
   }
 
   public CubeQueryContext rewrite(ASTNode astnode) throws SemanticException {
-    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(hconf);
-    analyzer.analyze(astnode, ctx);
+    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(conf, hconf);
+    analyzer.analyze(astnode, qlCtx);
     CubeQueryContext ctx = analyzer.getQueryContext();
     rewrite(rewriters, ctx);
     return ctx;
@@ -174,7 +177,7 @@ public class CubeQueryRewriter {
       command = command.replace("\n", "");
     }
     ParseDriver pd = new ParseDriver();
-    ASTNode tree = pd.parse(command, ctx, false);
+    ASTNode tree = pd.parse(command, qlCtx, false);
     tree = ParseUtils.findRootNonNullToken(tree);
     return rewrite(tree);
   }
@@ -188,7 +191,7 @@ public class CubeQueryRewriter {
       * Adding the iteration number as part of the gauge name since some rewriters have more than one phase; the
       * iteration number indicates which iteration the rewriter was run in
        */
-      MethodMetricsContext mgauge = MethodMetricsFactory.createMethodGauge(ctx.getHiveConf(), true,
+      MethodMetricsContext mgauge = MethodMetricsFactory.createMethodGauge(ctx.getConf(), true,
         rewriter.getClass().getCanonicalName() + ITER_STR + i);
       rewriter.rewriteContext(ctx);
       mgauge.markSuccess();
@@ -197,6 +200,17 @@ public class CubeQueryRewriter {
   }
 
   public Context getQLContext() {
-    return ctx;
+    return qlCtx;
+  }
+
+  public void clear() {
+    try {
+      if (qlCtx != null) {
+        qlCtx.clear();
+      }
+    } catch (IOException e) {
+      log.info("Ignoring exception in clearing qlCtx:", e);
+      // ignoring exception in clear
+    }
   }
 }
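
A hedged usage sketch tying these changes together: the caller now supplies both the query Configuration and a pre-built HiveConf, and releases the ql Context scratch space via the new clear() when done. queryConf and hiveConf are assumed to exist in the caller, and checked exceptions are elided:

  CubeQueryRewriter rewriter = new CubeQueryRewriter(queryConf, hiveConf);
  try {
    CubeQueryContext ctx = rewriter.rewrite("cube select msr2 from testCube where ...");
    String hql = ctx.toHQL();
    // ... hand hql to the driver
  } finally {
    rewriter.clear(); // swallows IOException internally, as shown above
  }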

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
index 4cf9bbd..68bffed 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
@@ -22,6 +22,7 @@ package org.apache.lens.cube.parse;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.*;
@@ -30,13 +31,15 @@ import org.apache.hadoop.hive.ql.parse.*;
  * Accepts cube query AST and rewrites into storage table query
  */
 public class CubeSemanticAnalyzer extends SemanticAnalyzer {
-  private final HiveConf conf;
+  private final Configuration queryConf;
+  private final HiveConf hiveConf;
   private final List<ValidationRule> validationRules = new ArrayList<ValidationRule>();
   private CubeQueryContext cubeQl;
 
-  public CubeSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
-    this.conf = conf;
+  public CubeSemanticAnalyzer(Configuration queryConf, HiveConf hiveConf) throws SemanticException {
+    super(hiveConf);
+    this.queryConf = queryConf;
+    this.hiveConf = hiveConf;
     setupRules();
   }
 
@@ -70,7 +73,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
       // if phase1Result false return
       return;
     }
-    cubeQl = new CubeQueryContext(ast, qb, conf);
+    cubeQl = new CubeQueryContext(ast, qb, queryConf, hiveConf);
     // cubeQl.init();
     // validate();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 81d91eb..2d239e1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -320,11 +320,11 @@ public class DenormalizationResolver implements ContextRewriter {
    */
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
-    DenormalizationContext denormCtx = cubeql.getDenormCtx();
+    DenormalizationContext denormCtx = cubeql.getDeNormCtx();
     if (denormCtx == null) {
       // Adds all the reference dimensions as eligible for denorm fields
       denormCtx = new DenormalizationContext(cubeql);
-      cubeql.setDenormCtx(denormCtx);
+      cubeql.setDeNormCtx(denormCtx);
       for (Map.Entry<String, Set<String>> entry : cubeql.getTblAliasToColumns().entrySet()) {
         // skip default alias
         if (entry.getKey() == CubeQueryContext.DEFAULT_TABLE) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index ecf0f3b..d83db18 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -62,7 +62,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
   protected void setMissingExpressions() throws SemanticException {
     setFrom(getFromString());
     setWhere(joinWithAnd(
-      getQuery().getHiveConf().getBoolean(
+      getQuery().getConf().getBoolean(
         CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
         ? getPostSelectionWhereClause() : null,
       genWhereClauseWithDimPartitions(where)));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 5e9bc2d..25e6353 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -111,16 +111,25 @@ public final class HQLParser {
     return ARITHMETIC_OPERATORS.contains(tokenType);
   }
 
-  public static ASTNode parseHQL(String query) throws ParseException {
+  public static ASTNode parseHQL(String query, HiveConf conf) throws ParseException {
     ParseDriver driver = new ParseDriver();
     ASTNode tree = null;
+    Context ctx = null;
     try {
-      tree = driver.parse(query, new Context(new HiveConf()));
+      ctx = new Context(conf);
+      tree = driver.parse(query, ctx);
+      tree = ParseUtils.findRootNonNullToken(tree);
     } catch (IOException e) {
       throw new RuntimeException(e);
+    } finally {
+      if (ctx != null) {
+        try {
+          ctx.clear();
+        } catch (IOException e) {
+          // ignoring exception in clear
+        }
+      }
     }
-    tree = ParseUtils.findRootNonNullToken(tree);
-    // printAST(tree);
     return tree;
   }
 
@@ -575,7 +584,7 @@ public final class HQLParser {
   }
 
   public static void main(String[] args) throws Exception {
-    ASTNode ast = parseHQL("select * from default_table ");
+    ASTNode ast = parseHQL("select * from default_table ", new HiveConf());
 
     printAST(getHiveTokenMapping(), ast, 0, 0);
   }
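
With the new signature the caller passes in an existing HiveConf, so parseHQL no longer builds one per call. A minimal hedged sketch, reusing one conf across parses:

  HiveConf conf = new HiveConf();   // built once, reused for every parse
  ASTNode ast = HQLParser.parseHQL("select id, name from citytable", conf);
  HQLParser.printAST(ast);          // one-argument variant, as used in CubeTestSetup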

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 44c474a..ae7886b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -31,7 +31,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
@@ -929,7 +928,6 @@ class JoinResolver implements ContextRewriter {
   private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
   private boolean partialJoinChain;
   private AbstractCubeTable target;
-  private HiveConf conf;
   private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
 
 
@@ -941,7 +939,6 @@ class JoinResolver implements ContextRewriter {
     partialJoinConditions = new HashMap<AbstractCubeTable, String>();
     tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
     try {
-      conf = cubeql.getHiveConf();
       resolveJoins(cubeql);
     } catch (HiveException e) {
       throw new SemanticException(e);
@@ -949,9 +946,9 @@ class JoinResolver implements ContextRewriter {
   }
 
   private void resolveJoins(CubeQueryContext cubeql) throws HiveException {
-    QB cubeQB = cubeql.getQB();
+    QB cubeQB = cubeql.getQb();
     boolean joinResolverDisabled =
-      conf.getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
+      cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
     if (joinResolverDisabled) {
       if (cubeql.getJoinTree() != null) {
         cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
@@ -990,14 +987,14 @@ class JoinResolver implements ContextRewriter {
     processJoinChains(cubeql);
     Set<Dimension> dimensions = cubeql.getNonChainedDimensions();
     // Add dimensions specified in the partial join tree
-    ASTNode joinClause = cubeql.getQB().getParseInfo().getJoinExpr();
+    ASTNode joinClause = cubeql.getQb().getParseInfo().getJoinExpr();
     if (joinClause == null) {
       // Only cube in the query
       if (cubeql.hasCubeInQuery()) {
         target = (AbstractCubeTable) cubeql.getCube();
       } else {
-        String targetDimAlias = cubeql.getQB().getTabAliases().iterator().next();
-        String targetDimTable = cubeql.getQB().getTabNameForAlias(targetDimAlias);
+        String targetDimAlias = cubeql.getQb().getTabAliases().iterator().next();
+        String targetDimTable = cubeql.getQb().getTabNameForAlias(targetDimAlias);
         if (targetDimTable == null) {
           LOG.warn("Null table for alias " + targetDimAlias);
         }
@@ -1100,7 +1097,7 @@ class JoinResolver implements ContextRewriter {
     }
     AutoJoinContext joinCtx =
       new AutoJoinContext(multipleJoinPaths, cubeql.optionalDimensions, partialJoinConditions, partialJoinChain,
-        tableJoinTypeMap, target, conf.get(CubeQueryConfUtil.JOIN_TYPE_KEY), true);
+        tableJoinTypeMap, target, cubeql.getConf().get(CubeQueryConfUtil.JOIN_TYPE_KEY), true);
     cubeql.setAutoJoinCtx(joinCtx);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 9cea231..c73b7ff 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -163,7 +163,7 @@ class TimerangeResolver implements ContextRewriter {
 
     // Look at referenced columns through denormalization resolver
     // and do column life validation
-    Map<String, Set<ReferencedQueriedColumn>> refCols = cubeql.getDenormCtx().getReferencedCols();
+    Map<String, Set<ReferencedQueriedColumn>> refCols = cubeql.getDeNormCtx().getReferencedCols();
     for (String col : refCols.keySet()) {
       Iterator<ReferencedQueriedColumn> refColIter = refCols.get(col).iterator();
       while (refColIter.hasNext()) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
index b905f70..de79423 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
@@ -35,6 +35,7 @@ import org.apache.lens.server.api.metrics.MethodMetricsFactory;
 import org.apache.lens.server.api.query.AbstractQueryContext;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
@@ -83,8 +84,8 @@ public final class RewriteUtil {
    * @throws SemanticException the semantic exception
    * @throws ParseException    the parse exception
    */
-  static List<CubeQueryInfo> findCubePositions(String query) throws SemanticException, ParseException {
-    ASTNode ast = HQLParser.parseHQL(query);
+  static List<CubeQueryInfo> findCubePositions(String query, HiveConf conf) throws SemanticException, ParseException {
+    ASTNode ast = HQLParser.parseHQL(query, conf);
     LOG.debug("User query AST:" + ast.dump());
     List<CubeQueryInfo> cubeQueries = new ArrayList<CubeQueryInfo>();
     findCubePositions(ast, cubeQueries, query);
@@ -188,8 +189,8 @@ public final class RewriteUtil {
    * @return the rewriter
    * @throws SemanticException the semantic exception
    */
-  static CubeQueryRewriter getCubeRewriter(Configuration queryConf) throws SemanticException {
-    return new CubeQueryRewriter(queryConf);
+  static CubeQueryRewriter getCubeRewriter(Configuration queryConf, HiveConf hconf) throws SemanticException {
+    return new CubeQueryRewriter(queryConf, hconf);
   }
 
   /**
@@ -226,17 +227,17 @@ public final class RewriteUtil {
           driverQueries.put(driver, replacedQuery);
         }
       } else {
-        List<RewriteUtil.CubeQueryInfo> cubeQueries = findCubePositions(replacedQuery);
+        List<RewriteUtil.CubeQueryInfo> cubeQueries = findCubePositions(replacedQuery, ctx.getHiveConf());
         for (LensDriver driver : ctx.getDriverContext().getDrivers()) {
           MethodMetricsContext rewriteGauge = MethodMetricsFactory.createMethodGauge(ctx.getDriverConf(driver), true,
             REWRITE_QUERY_GAUGE);
           StringBuilder builder = new StringBuilder();
           int start = 0;
+          CubeQueryRewriter rewriter = null;
           try {
-            CubeQueryRewriter rewriter = null;
             if (cubeQueries.size() > 0) {
               // avoid creating rewriter if there are no cube queries
-              rewriter = getCubeRewriter(ctx.getDriverContext().getDriverConf(driver));
+              rewriter = getCubeRewriter(ctx.getDriverContext().getDriverConf(driver), ctx.getHiveConf());
               ctx.setOlapQuery(true);
             }
             for (RewriteUtil.CubeQueryInfo cqi : cubeQueries) {
@@ -270,6 +271,10 @@ public final class RewriteUtil {
             if (failureCause == null) {
               failureCause = e.getLocalizedMessage();
             }
+          } finally {
+            if (rewriter != null) {
+              rewriter.clear();
+            }
           }
           rewriteGauge.markSuccess();
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 3fbdee9..cdf3da6 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -2053,7 +2053,7 @@ public class CubeTestSetup {
   public static void printQueryAST(String query, String label) throws ParseException {
     System.out.println("--" + label + "--AST--");
     System.out.println("--query- " + query);
-    HQLParser.printAST(HQLParser.parseHQL(query));
+    HQLParser.printAST(HQLParser.parseHQL(query, new HiveConf()));
   }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 0669074..4278229 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -1325,8 +1325,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     final String query = "SELECT cycledim1.name, msr2 FROM testCube where " + TWO_DAYS_RANGE;
     try {
-      CubeQueryRewriter rewriter = new CubeQueryRewriter(testConf);
-      CubeQueryContext context = rewriter.rewrite(query);
+      CubeQueryContext context = rewriteCtx(query, testConf);
       System.out.println("TestJoinPathTimeRange: " + context.toHQL());
       Assert.fail("Expected query to fail because of invalid column life");
     } catch (SemanticException exc) {
@@ -1347,20 +1346,15 @@ public class TestCubeRewriter extends TestQueryRewrite {
         col.getCost());
     cube.alterDimension(newDim2);
     client.alterCube(cubeName, cube);
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(testConf);
-    CubeQueryContext context = rewriter.rewrite(query);
-    String hql = context.toHQL();
+    String hql = rewrite(query, testConf);
     Assert.assertNotNull(hql);
   }
 
   @Test
   public void testCubeQueryWithSpaceInAlias() throws Exception {
     String query = "SELECT sum(msr2) as `a measure` from testCube where " + TWO_DAYS_RANGE;
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(getConf());
     try {
-      HQLParser.printAST(HQLParser.parseHQL(query));
-      CubeQueryContext ctx = rewriter.rewrite(query);
-      String hql = ctx.toHQL();
+      String hql = rewrite(query, getConf());
       Assert.assertNotNull(hql);
       // test that quotes are preserved
       Assert.assertTrue(hql.contains("`a measure`"));
@@ -1385,7 +1379,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3,C4");
     hconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, true);
 
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter rewriter = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext context = rewriter.rewrite(query);
     String hql = context.toHQL();
     System.out.println("@@" + hql);
@@ -1401,7 +1395,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     // Rewrite with setting disabled
     hconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
-    rewriter = new CubeQueryRewriter(hconf);
+    rewriter = new CubeQueryRewriter(hconf, hconf);
     context = rewriter.rewrite(query);
     hql = context.toHQL();
     System.out.println("@@2 " + hql);
@@ -1411,12 +1405,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testAliasNameSameAsColumnName() throws Exception {
     String query = "SELECT msr2 as msr2 from testCube WHERE " + TWO_DAYS_RANGE;
-    HQLParser.printAST(HQLParser.parseHQL(query));
-    HiveConf hiveConf = new HiveConf(getConf(), TestCubeRewriter.class);
     try {
-      CubeQueryRewriter rewriter = new CubeQueryRewriter(hiveConf);
-      CubeQueryContext ctx = rewriter.rewrite(query);
-      String hql = ctx.toHQL();
+      String hql = rewrite(query, getConf());
       Assert.assertNotNull(hql);
       System.out.println("@@HQL " + hql);
     } catch (NullPointerException npe) {
@@ -1428,29 +1418,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testDimAttributeQueryWithFact() throws Exception {
     String query = "select count (distinct dim1) from testCube where " + TWO_DAYS_RANGE;
-    HiveConf conf = new HiveConf(getConf(), TestCubeRewriter.class);
-    CubeQueryRewriter cubeQueryRewriter = new CubeQueryRewriter(conf);
-    CubeQueryContext ctx = cubeQueryRewriter.rewrite(query);
-    String rewrittenQuery = ctx.toHQL();
-    System.out.println("##testDimAttributeQueryWithFact " + rewrittenQuery);
-    Assert.assertTrue(rewrittenQuery.contains("summary1"));
+    String hql = rewrite(query, getConf());
+    Assert.assertTrue(hql.contains("summary1"));
   }
 
   @Test
   public void testSelectDimonlyJoinOnCube() throws Exception {
     String query = "SELECT count (distinct citydim.name) from testCube where " + TWO_DAYS_RANGE;
-    HiveConf conf = new HiveConf(getConf(), TestCubeRewriter.class);
+    Configuration conf = new Configuration(getConf());
     conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    try {
-      CubeQueryRewriter rewriter = new CubeQueryRewriter(conf);
-      CubeQueryContext context = rewriter.rewrite(query);
-      String hql = context.toHQL();
-      System.out.println("@@ HQL = " + hql);
-      Assert.assertNotNull(hql);
-    } catch (Exception exc) {
-      exc.printStackTrace();
-      Assert.fail("Query should be rewritten successfully.");
-    }
+    String hql = rewrite(query, conf);
+    System.out.println("@@ HQL = " + hql);
+    Assert.assertNotNull(hql);
   }
 
   @Test
@@ -1478,9 +1457,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS,
       AbridgedTimeRangeWriter.class.asSubclass(TimeRangeWriter.class), TimeRangeWriter.class);
 
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(conf);
-    CubeQueryContext context = rewriter.rewrite(query);
-    String hqlWithInClause = context.toHQL();
+    String hqlWithInClause = rewrite(query, conf);
     System.out.println("@@ HQL with IN and OR: " + hqlWithInClause);
 
     // Run explain on this command, it should pass successfully.
@@ -1496,9 +1473,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     largeConf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS,
       AbridgedTimeRangeWriter.class.asSubclass(TimeRangeWriter.class), TimeRangeWriter.class);
 
-    CubeQueryRewriter largePartQueryRewriter = new CubeQueryRewriter(largeConf);
-    CubeQueryContext largePartQueryContext = largePartQueryRewriter.rewrite(largePartQuery);
-    String largePartRewrittenQuery = largePartQueryContext.toHQL();
+    String largePartRewrittenQuery = rewrite(largePartQuery, largeConf);
     CommandProcessorResponse response = runExplain(largePartRewrittenQuery, largeConf);
     Assert.assertNotNull(response);
     Assert.assertTrue(largePartRewrittenQuery.contains("in"));
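
The test hunks above replace hand-built CubeQueryRewriter instances with the rewrite()/rewriteCtx() helpers (defined in TestQueryRewrite, not shown in this diff), which wrap rewriter creation and cleanup. A hedged sketch of a test body using them, with TWO_DAYS_RANGE and getConf() coming from the test setup shown above:

  String hql = rewrite("select msr2 from testCube where " + TWO_DAYS_RANGE, getConf());
  Assert.assertNotNull(hql);
  CubeQueryContext ctx = rewriteCtx("select msr2 from testCube where " + TWO_DAYS_RANGE, getConf());
  Assert.assertNotNull(ctx.toHQL());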

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
index cf8c57b..bb332dc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
@@ -28,10 +29,11 @@ import org.testng.Assert;
 import org.testng.annotations.Test;
 
 public class TestHQLParser {
+  HiveConf conf = new HiveConf();
   @Test
   public void testGroupByOrderByGetString() throws Exception {
     String query = "SELECT a,b, sum(c) FROM tab GROUP BY a,f(b), d+e ORDER BY a, g(b), e/100";
-    ASTNode node = HQLParser.parseHQL(query);
+    ASTNode node = HQLParser.parseHQL(query, conf);
 
     ASTNode groupby = HQLParser.findNodeByPath(node, TOK_INSERT, TOK_GROUPBY);
     String expected = "a , f( b ), ( d  +  e )";
@@ -47,7 +49,7 @@ public class TestHQLParser {
   public void testLiteralCaseIsPreserved() throws Exception {
     String literalQuery = "SELECT 'abc' AS col1, 'DEF' AS col2 FROM foo where col3='GHI' " + "AND col4 = 'JKLmno'";
 
-    ASTNode tree = HQLParser.parseHQL(literalQuery);
+    ASTNode tree = HQLParser.parseHQL(literalQuery, conf);
 
     ASTNode select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select).trim();
@@ -65,7 +67,7 @@ public class TestHQLParser {
     String query = "SELECT  " + "CASE (col1 * 100)/200 + 5 " + "WHEN 'ABC' THEN 'def' " + "WHEN 'EFG' THEN 'hij' "
       + "ELSE 'XyZ' " + "END AS ComplexCaseStatement FROM FOO";
 
-    ASTNode tree = HQLParser.parseHQL(query);
+    ASTNode tree = HQLParser.parseHQL(query, conf);
     ASTNode select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause ");
@@ -77,7 +79,7 @@ public class TestHQLParser {
     String q2 = "SELECT " + "CASE WHEN col1 = 'abc' then 'def' " + "when col1 = 'ghi' then 'jkl' "
       + "else 'none' END AS Complex_Case_Statement_2" + " from FOO";
 
-    tree = HQLParser.parseHQL(q2);
+    tree = HQLParser.parseHQL(q2, conf);
     select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause 2");
@@ -88,7 +90,7 @@ public class TestHQLParser {
     String q3 = "SELECT  " + "CASE (col1 * 100)/200 + 5 " + "WHEN 'ABC' THEN 'def' " + "WHEN 'EFG' THEN 'hij' "
       + "END AS ComplexCaseStatement FROM FOO";
 
-    tree = HQLParser.parseHQL(q3);
+    tree = HQLParser.parseHQL(q3, conf);
     select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause ");
@@ -99,7 +101,7 @@ public class TestHQLParser {
     String q4 = "SELECT " + "CASE WHEN col1 = 'abc' then 'def' " + "when col1 = 'ghi' then 'jkl' "
       + "END AS Complex_Case_Statement_2" + " from FOO";
 
-    tree = HQLParser.parseHQL(q4);
+    tree = HQLParser.parseHQL(q4, conf);
     select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause 2");
@@ -112,7 +114,7 @@ public class TestHQLParser {
   @Test
   public void testIsNullCondition() throws Exception {
     String q1 = "SELECT * FROM FOO WHERE col1 IS NULL";
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("col1  is null", whereStr.trim());
@@ -121,7 +123,7 @@ public class TestHQLParser {
   @Test
   public void testIsNotNullCondition() throws Exception {
     String q1 = "SELECT * FROM FOO WHERE col1 IS NOT NULL";
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("col1  is not null", whereStr.trim());
@@ -130,7 +132,7 @@ public class TestHQLParser {
   @Test
   public void testBetweenCondition() throws Exception {
     String q1 = "SELECT * FROM FOO WHERE col1 BETWEEN 10 AND 100";
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("col1  between  10  and  100", whereStr.trim());
@@ -139,7 +141,7 @@ public class TestHQLParser {
   @Test
   public void testNotBetweenCondition() throws Exception {
     String q1 = "SELECT * FROM FOO WHERE col1 NOT BETWEEN 10 AND 100";
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("col1  not between  10  and  100", whereStr.trim());
@@ -150,7 +152,7 @@ public class TestHQLParser {
     String q1 = "SELECT * FROM FOO WHERE " + "(A <=> 10) AND (B & C = 10) AND (D | E = 10) "
       + "AND (F ^ G = 10) AND (H % 2 = 1) AND  (~I = 10)" + "AND (!J) AND (NOT K) AND TRUE AND FALSE";
 
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     String expected = "(((((((((( a  <=>  10 ) and (( b  &  c ) =  10 )) "
       + "and (( d  |  e ) =  10 )) and (( f  ^  g ) =  10 )) "
@@ -163,7 +165,7 @@ public class TestHQLParser {
   public void testCompelxTypeOperators() throws Exception {
     String q1 = "SELECT A[2], B['key'], C.D FROM FOO";
 
-    ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_SELECT);
+    ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
     Assert.assertEquals("a [ 2 ],  b [ 'key' ], ( c . d )", selectStr.trim());
@@ -172,13 +174,13 @@ public class TestHQLParser {
   @Test
   public void testInAndNotInOperator() throws Exception {
     String q1 = "SELECT * FROM FOO WHERE A IN ('B', 'C', 'D', 'E', 'F')";
-    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("a  in ( 'B'  ,  'C'  ,  'D'  ,  'E'  ,  'F' )", whereStr.trim());
 
     q1 = "SELECT * FROM FOO WHERE A NOT IN ('B', 'C', 'D', 'E', 'F')";
-    where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1), TOK_INSERT, TOK_WHERE);
+    where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
     Assert.assertEquals("a  not  in ( 'B'  ,  'C'  ,  'D'  ,  'E'  ,  'F' )", whereStr.trim());
@@ -188,40 +190,40 @@ public class TestHQLParser {
   public void testOrderbyBrackets() throws Exception {
     String query = "SELECT id from citytable order by ((citytable.id) asc)";
     // String hql = rewrite(driver, query);
-    ASTNode tree = HQLParser.parseHQL(query);
+    ASTNode tree = HQLParser.parseHQL(query, conf);
     ASTNode orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     String reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED0:" + reconstructed);
     // Assert.assertEquals("(( citytable  .  id ) asc )", reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed);
+    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
 
     String query2 = "SELECT id from citytable order by (citytable.id asc)";
-    tree = HQLParser.parseHQL(query2);
+    tree = HQLParser.parseHQL(query2, conf);
     orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED1:" + reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed);
+    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
 
     String query3 = "SELECT id, name from citytable order by citytable.id asc, citytable.name desc";
-    tree = HQLParser.parseHQL(query3);
+    tree = HQLParser.parseHQL(query3, conf);
     orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED2:" + reconstructed);
-    HQLParser.parseHQL("SELECT id, name FROM citytable ORDER BY " + reconstructed);
+    HQLParser.parseHQL("SELECT id, name FROM citytable ORDER BY " + reconstructed, conf);
 
     String query4 = "SELECT id from citytable order by citytable.id";
-    tree = HQLParser.parseHQL(query4);
+    tree = HQLParser.parseHQL(query4, conf);
     orderByTree = HQLParser.findNodeByPath(tree, TOK_INSERT, HiveParser.TOK_ORDERBY);
     reconstructed = HQLParser.getString(orderByTree);
     System.out.println("RECONSTRUCTED3:" + reconstructed);
-    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed);
+    HQLParser.parseHQL("SELECT citytable.id FROM citytable ORDER BY " + reconstructed, conf);
   }
 
   @Test
   public void testInnerJoin() throws Exception {
     String query
       = "select tab1.a, tab2.b from table1 tab1 inner join table tab2 on tab1.id = tab2.id where tab1.a > 123";
-    ASTNode node = HQLParser.parseHQL(query);
+    ASTNode node = HQLParser.parseHQL(query, conf);
     ASTNode temp = HQLParser.findNodeByPath(node, TOK_FROM, TOK_JOIN);
     String expected = " table1  tab1  table  tab2 (( tab1 . id ) = ( tab2 . id ))";
     Assert.assertEquals(expected, HQLParser.getString(temp));
@@ -231,7 +233,7 @@ public class TestHQLParser {
   public void testAliasWithSpaces() throws Exception {
     String query = "select id as `an id` from sample_dim";
     try {
-      ASTNode tree = HQLParser.parseHQL(query);
+      ASTNode tree = HQLParser.parseHQL(query, conf);
     } catch (NullPointerException exc) {
       exc.printStackTrace();
       Assert.fail("should not have thrown npe");
@@ -241,27 +243,27 @@ public class TestHQLParser {
   @Test
   public void testAllColumns() throws Exception {
     String query = "select * from tab";
-    ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(query), TOK_INSERT, TOK_SELECT);
+    ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(query, conf), TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
     Assert.assertEquals(" * ", selectStr);
 
     query = "select tab.*, tab2.a, tab2.b from tab";
-    ASTNode ast = HQLParser.parseHQL(query);
+    ASTNode ast = HQLParser.parseHQL(query, conf);
     select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
     Assert.assertEquals(" tab . * , ( tab2 . a ), ( tab2 . b )", selectStr);
 
     query = "select count(*) from tab";
-    ast = HQLParser.parseHQL(query);
+    ast = HQLParser.parseHQL(query, conf);
     select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
     Assert.assertEquals(" count(*) ", selectStr);
 
     query = "select count(tab.*) from tab";
-    ast = HQLParser.parseHQL(query);
+    ast = HQLParser.parseHQL(query, conf);
     select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
@@ -271,14 +273,14 @@ public class TestHQLParser {
   @Test
   public void testNegativeLiteral() throws Exception {
     String query1 = "select 2-1 as col1,col2 from table1";
-    ASTNode tree = HQLParser.parseHQL(query1);
+    ASTNode tree = HQLParser.parseHQL(query1, conf);
     ASTNode selectAST = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     HQLParser.printAST(selectAST);
     String genQuery = HQLParser.getString(selectAST);
     System.out.println("genQuery1: " + genQuery);
 
     String query2 = "select -1 as col1,col2 from table1";
-    tree = HQLParser.parseHQL(query2);
+    tree = HQLParser.parseHQL(query2, conf);
     selectAST = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     HQLParser.printAST(selectAST);
     String genQuery2 = HQLParser.getString(selectAST);
@@ -288,7 +290,8 @@ public class TestHQLParser {
     Assert.assertTrue(genQuery2.contains("-  1"));
 
     // Validate returned string is parseable
-    HQLParser.printAST(HQLParser.findNodeByPath(HQLParser.parseHQL("SELECT " + genQuery2 + " FROM table1"), TOK_INSERT,
+    HQLParser.printAST(HQLParser.findNodeByPath(HQLParser.parseHQL("SELECT " + genQuery2 + " FROM table1", conf),
+      TOK_INSERT,
       TOK_SELECT));
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index f7bd3b8..df594bd 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -158,7 +158,7 @@ public class TestJoinResolver extends TestQueryRewrite {
   public void testAutoJoinResolver() throws Exception {
     // Test 1 Cube + dim
     String query = "select citydim.name, testDim2.name, testDim4.name, msr2 from testCube where " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext rewrittenQuery = driver.rewrite(query);
     String hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);
@@ -217,7 +217,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       "SELECT citydim.name, testDim4.name, msr2 "
         + "FROM testCube left outer join citydim ON citydim.name = 'FOOBAR'"
         + " right outer join testDim4 on testDim4.name='TESTDIM4NAME'" + " WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext rewrittenQuery = driver.rewrite(query);
     String hql = rewrittenQuery.toHQL();
     System.out.println("testPartialJoinResolver Partial join hql: " + hql);
@@ -239,7 +239,7 @@ public class TestJoinResolver extends TestQueryRewrite {
   @Test
   public void testJoinNotRequired() throws Exception {
     String query = "SELECT msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext ctx = driver.rewrite(query);
     Assert.assertTrue(ctx.getAutoJoinCtx() == null);
   }
@@ -247,7 +247,7 @@ public class TestJoinResolver extends TestQueryRewrite {
   @Test
   public void testJoinWithoutCondition() throws Exception {
     String query = "SELECT citydim.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext ctx = driver.rewrite(query);
     String hql = ctx.toHQL();
     String joinClause = getAutoResolvedFromString(ctx);
@@ -261,7 +261,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     HiveConf tConf = new HiveConf(hconf);
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
     System.out.println("@@Set join type to " + hconf.get(CubeQueryConfUtil.JOIN_TYPE_KEY));
-    CubeQueryRewriter driver = new CubeQueryRewriter(tConf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(tConf, hconf);
     String query = "select citydim.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
     CubeQueryContext ctx = driver.rewrite(query);
     String hql = ctx.toHQL();
@@ -272,7 +272,7 @@ public class TestJoinResolver extends TestQueryRewrite {
 
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "FULLOUTER");
     System.out.println("@@Set join type to " + hconf.get(CubeQueryConfUtil.JOIN_TYPE_KEY));
-    driver = new CubeQueryRewriter(tConf);
+    driver = new CubeQueryRewriter(tConf, hconf);
     ctx = driver.rewrite(query);
     hql = ctx.toHQL();
     System.out.println("testJoinTypeConf@@Resolved join clause2 - " + getAutoResolvedFromString(ctx));
@@ -286,7 +286,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     HiveConf tConf = new HiveConf(hconf);
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
     String query = "select c.name, t.msr2 FROM testCube t join citydim c WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(tConf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(tConf, hconf);
     CubeQueryContext ctx = driver.rewrite(query);
     String hql = ctx.toHQL();
     System.out.println("testPreserveTableAlias@@HQL:" + hql);
@@ -306,7 +306,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "INNER");
     String query = "select citydim.name, statedim.name from citydim limit 10";
     HiveConf dimOnlyConf = new HiveConf(tConf);
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(dimOnlyConf);
+    CubeQueryRewriter rewriter = new CubeQueryRewriter(dimOnlyConf, hconf);
     CubeQueryContext ctx = rewriter.rewrite(query);
     String hql = ctx.toHQL();
     System.out.println("testDimOnlyQuery@@@HQL:" + hql);
@@ -320,7 +320,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     ctx = rewriter.rewrite(queryWithJoin);
     hql = ctx.toHQL();
     System.out.println("testDimOnlyQuery@@@HQL2:" + hql);
-    HQLParser.parseHQL(hql);
+    HQLParser.parseHQL(hql, tConf);
     Assert.assertEquals(getDbName() + "c1_citytable citydim inner join " + getDbName()
         + "c1_statetable statedim on citydim.stateid = statedim.id and (statedim.dt = 'latest')",
       getAutoResolvedFromString(ctx).trim());
@@ -331,7 +331,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     String q = "SELECT citydim.name, statedim.name FROM citydim";
     HiveConf conf = new HiveConf(hconf);
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(conf);
+    CubeQueryRewriter rewriter = new CubeQueryRewriter(conf, hconf);
     CubeQueryContext context = rewriter.rewrite(q);
     String hql = context.toHQL();
     System.out.println("##1 hql " + hql);
@@ -342,7 +342,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     Assert.assertTrue(hql.matches(".*?WHERE\\W+citydim.dt = 'latest'\\W+.*?"));
 
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "RIGHTOUTER");
-    rewriter = new CubeQueryRewriter(conf);
+    rewriter = new CubeQueryRewriter(conf, hconf);
     context = rewriter.rewrite(q);
     hql = context.toHQL();
     System.out.println("##2 hql " + hql);
@@ -353,7 +353,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     Assert.assertTrue(hql.matches(".*?WHERE\\W+statedim.dt = 'latest'\\W+.*?"));
 
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "FULLOUTER");
-    rewriter = new CubeQueryRewriter(conf);
+    rewriter = new CubeQueryRewriter(conf, hconf);
     context = rewriter.rewrite(q);
     hql = context.toHQL();
     System.out.println("##3 hql " + hql);
@@ -567,7 +567,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     List<String> expectedClauses = new ArrayList<String>();
     List<String> actualClauses = new ArrayList<String>();
     String dimOnlyQuery = "select testDim2.name, testDim2.cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf);
+    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext rewrittenQuery = driver.rewrite(dimOnlyQuery);
     String hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);
@@ -591,7 +591,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     //Dim only join chain query without qualified tableName for join chain ref column
     actualClauses.clear();
     dimOnlyQuery = "select name, cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
-    driver = new CubeQueryRewriter(hconf);
+    driver = new CubeQueryRewriter(hconf, hconf);
     rewrittenQuery = driver.rewrite(dimOnlyQuery);
     hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);
@@ -611,7 +611,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     //With ChainRef.col
     actualClauses.clear();
     dimOnlyQuery = "select testDim2.name, cityState.capital FROM testDim2 where " + TWO_DAYS_RANGE;
-    driver = new CubeQueryRewriter(hconf);
+    driver = new CubeQueryRewriter(hconf, hconf);
     rewrittenQuery = driver.rewrite(dimOnlyQuery);
     hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);

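Note: throughout this file the CubeQueryRewriter constructor now takes the per-query Configuration and the metastore HiveConf as two separate arguments. A minimal sketch of the new usage, reusing names from the tests above (assumes the usual test imports and that CubeTestSetup has created the schema):

    // Per-query overrides go into a cheap Configuration copy; the shared
    // HiveConf is passed alongside it and reused for metastore access.
    Configuration queryConf = new Configuration(hconf);
    queryConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
    CubeQueryRewriter rewriter = new CubeQueryRewriter(queryConf, hconf);
    CubeQueryContext ctx = rewriter.rewrite("select citydim.name, msr2 from testCube where " + TWO_DAYS_RANGE);
    String hql = ctx.toHQL();
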
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
index a1451b9..c4449da 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
@@ -40,7 +40,7 @@ import lombok.extern.slf4j.Slf4j;
 public abstract class TestQueryRewrite {
 
   private static CubeTestSetup setup;
-  private static HiveConf hconf = new HiveConf(TestJoinResolver.class);
+  private static HiveConf hconf = new HiveConf(TestQueryRewrite.class);
 
   @BeforeSuite
   public static void setup() throws Exception {
@@ -68,7 +68,7 @@ public abstract class TestQueryRewrite {
 
   protected CubeQueryContext rewriteCtx(String query, Configuration conf) throws SemanticException, ParseException {
     log.info("User query: {}", query);
-    CubeQueryRewriter driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
+    CubeQueryRewriter driver = new CubeQueryRewriter(conf, hconf);
     return driver.rewrite(query);
   }
 

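The test subclasses below now call an inherited rewrite(query, conf) helper instead of managing a CubeQueryRewriter themselves. That helper is not shown in this diff; a plausible shape for it, built on the rewriteCtx method above, would be:

    // Sketch only - delegates to rewriteCtx, so each call builds a fresh
    // rewriter from the caller's conf plus the shared static hconf.
    protected String rewrite(String query, Configuration conf) throws SemanticException, ParseException {
      return rewriteCtx(query, conf).toHQL();
    }
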
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
index 84c4c44..f19221b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
@@ -25,6 +25,7 @@ import java.util.List;
 
 import org.apache.lens.cube.metadata.TestCubeMetastoreClient;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ParseException;
@@ -42,7 +43,7 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @BeforeTest
   public void setupInstance() throws Exception {
-    driver = new CubeQueryRewriter(new HiveConf());
+    driver = new CubeQueryRewriter(new Configuration(), new HiveConf());
     dateTwoDaysBack = getDateUptoHours(TWODAYS_BACK);
     dateNow = getDateUptoHours(NOW);
   }

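This change is the heart of LENS-408: every new HiveConf() re-initializes the Hive defaults and re-reads hive-site.xml, while copying a plain Configuration is cheap. The pattern the commit moves callers toward looks roughly like this (a sketch, not code from the commit):

    HiveConf shared = new HiveConf();                    // built once, up front
    Configuration perQuery = new Configuration(shared);  // cheap per-query copy
    perQuery.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
    CubeQueryRewriter rewriter = new CubeQueryRewriter(perQuery, shared);
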
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index 8f7da7d..b5aaeb2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -29,9 +29,7 @@ import java.util.Map;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
@@ -41,7 +39,6 @@ import org.testng.annotations.Test;
 public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
   private Configuration conf;
-  private CubeQueryRewriter driver;
   private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
 
   @BeforeTest
@@ -54,14 +51,6 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS,
       BetweenTimeRangeWriter.class.asSubclass(TimeRangeWriter.class), TimeRangeWriter.class);
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
-  }
-
-  private CubeQueryContext rewrittenQuery;
-
-  private String rewrite(CubeQueryRewriter driver, String query) throws SemanticException, ParseException {
-    rewrittenQuery = driver.rewrite(query);
-    return rewrittenQuery.toHQL();
   }
 
   private Date getOneLess(Date in, int calField) {
@@ -84,7 +73,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
   public void testCubeQuery() throws Exception {
     SemanticException th = null;
     try {
-      rewrite(driver, "cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE);
+      rewrite("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
     } catch (SemanticException e) {
       th = e;
       e.printStackTrace();
@@ -96,8 +85,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     }
     // hourly partitions for two days
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
-    String hqlQuery = rewrite(driver, "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE);
+    String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(
       CubeTestSetup.getDbName() + "c1_testfact2",
@@ -109,8 +97,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     // multiple range query
     hqlQuery =
-      rewrite(driver, "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR "
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS);
+      rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR "
+        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, conf);
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
@@ -127,8 +115,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     // format option in the query
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
-    hqlQuery = rewrite(driver, "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE);
+    hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     whereClauses = new HashMap<String, String>();
     whereClauses.put(CubeTestSetup.getDbName() + "c1_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
       "dt", getUptoHour(CubeTestSetup.TWODAYS_BACK),
@@ -145,12 +132,11 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3,C4");
     conf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
 
     String query =
       "SELECT test_time_dim, msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite(driver, query);
+    String hqlQuery = rewrite(query, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
       "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
@@ -162,23 +148,21 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
         " GROUP BY hourdim.full_hour", null, whereClauses);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     query =
       "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    hqlQuery = rewrite(driver, query);
+    hqlQuery = rewrite(query, conf);
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
         + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", null, null, null, whereClauses);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     query =
       "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
         + "') and testcube.cityid != 5";
-    hqlQuery = rewrite(driver, query);
+    hqlQuery = rewrite(query, conf);
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -187,14 +171,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
     // multiple range query
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     hqlQuery =
       rewrite(
-        driver,
         "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
           + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
           + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')");
+          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", conf);
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
@@ -212,13 +194,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     hqlQuery =
-      rewrite(driver,
+      rewrite(
         "select to_date(test_time_dim), SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
           + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
           + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')");
+          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", conf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(hourdim.full_hour), sum(testcube.msr2) FROM ", " join "
@@ -235,12 +216,11 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3,C4");
     conf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
 
     String query =
       "SELECT test_time_dim2, msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite(driver, query);
+    String hqlQuery = rewrite(query, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact", TestBetweenTimeRangeWriter.getBetweenClause("timechain",
       "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
@@ -252,23 +232,21 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
         " GROUP BY timechain.full_hour", null, whereClauses);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     query =
       "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    hqlQuery = rewrite(driver, query);
+    hqlQuery = rewrite(query, conf);
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
         + "c4_hourDimTbl timechain on testcube.test_time_dim_hour_id2  = timechain.id", null, null, null, whereClauses);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     query =
       "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim2, '"
         + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
         + "') and testcube.cityid != 5";
-    hqlQuery = rewrite(driver, query);
+    hqlQuery = rewrite(query, conf);
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -277,14 +255,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
     // multiple range query
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     hqlQuery =
       rewrite(
-        driver,
         "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
           + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
           + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')");
+          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", conf);
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
@@ -302,13 +278,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    driver = new CubeQueryRewriter(new HiveConf(conf, HiveConf.class));
     hqlQuery =
-      rewrite(driver,
+      rewrite(
         "select to_date(test_time_dim2), SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
           + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
           + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')");
+          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", conf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(timechain.full_hour), sum(testcube.msr2) FROM ", " join "

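Net effect in this file: the per-class rewriter field and the local rewrite(driver, query) helper are gone, and conf changes take effect on the next rewrite(query, conf) call with no rewriter to rebuild by hand. For example (fragment, using the conf field above):

    conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
    // No 'driver = new CubeQueryRewriter(...)' needed before the next call:
    String hqlQuery = rewrite("select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
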
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/51c52eb3/lens-cube/src/test/java/org/apache/lens/driver/cube/TestRewriting.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/driver/cube/TestRewriting.java b/lens-cube/src/test/java/org/apache/lens/driver/cube/TestRewriting.java
index 6e758a6..00a0397 100644
--- a/lens-cube/src/test/java/org/apache/lens/driver/cube/TestRewriting.java
+++ b/lens-cube/src/test/java/org/apache/lens/driver/cube/TestRewriting.java
@@ -37,6 +37,7 @@ import org.apache.lens.server.api.metrics.LensMetricsRegistry;
 import org.apache.lens.server.api.query.QueryContext;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -74,6 +75,7 @@ public class TestRewriting {
   public IObjectFactory getObjectFactory() {
     return new PowerMockObjectFactory();
   }
+  private HiveConf hconf = new HiveConf();
 
   static int i = 0;
   // number of successful queries through mock rewriter
@@ -117,7 +119,7 @@ public class TestRewriting {
   private CubeQueryContext getMockedCubeContext(String query) throws SemanticException, ParseException {
     CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
     Mockito.when(context.toHQL()).thenReturn(query.substring(4));
-    Mockito.when(context.toAST(any(Context.class))).thenReturn(HQLParser.parseHQL(query.substring(4)));
+    Mockito.when(context.toAST(any(Context.class))).thenReturn(HQLParser.parseHQL(query.substring(4), hconf));
     return context;
   }
 
@@ -167,7 +169,7 @@ public class TestRewriting {
     PowerMockito.stub(PowerMockito.method(RewriteUtil.class, "getCubeRewriter")).toReturn(mockWriter);
     String q1 = "select name from table";
     Assert.assertFalse(RewriteUtil.isCubeQuery(q1));
-    List<RewriteUtil.CubeQueryInfo> cubeQueries = RewriteUtil.findCubePositions(q1);
+    List<RewriteUtil.CubeQueryInfo> cubeQueries = RewriteUtil.findCubePositions(q1, hconf);
     Assert.assertEquals(cubeQueries.size(), 0);
     QueryContext ctx = new QueryContext(q1, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
@@ -176,7 +178,7 @@ public class TestRewriting {
     driver.configure(conf);
     String q2 = "cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -190,7 +192,7 @@ public class TestRewriting {
 
     q2 = "insert overwrite directory '/tmp/rewrite' cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -198,7 +200,7 @@ public class TestRewriting {
 
     q2 = "insert overwrite local directory '/tmp/rewrite' cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -206,7 +208,7 @@ public class TestRewriting {
 
     q2 = "insert overwrite local directory '/tmp/example-output' cube select id,name from dim_table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select id,name from dim_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -214,7 +216,7 @@ public class TestRewriting {
 
     q2 = "explain cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -222,7 +224,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table) a";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -230,7 +232,7 @@ public class TestRewriting {
 
     q2 = "insert overwrite directory '/tmp/rewrite' select * from (cube select name from table) a";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -238,7 +240,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table)a";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -246,7 +248,7 @@ public class TestRewriting {
 
     q2 = "select * from  (  cube select name from table   )     a";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -255,7 +257,7 @@ public class TestRewriting {
     q2 = "select * from (      cube select name from table where"
       + " (name = 'ABC'||name = 'XYZ')&&(key=100)   )       a";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2));
+    cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2), hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from"
       + " table where (name = 'ABC' OR name = 'XYZ') AND (key=100)");
@@ -264,7 +266,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table) a join (cube select" + " name2 from table2) b";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
@@ -274,7 +276,7 @@ public class TestRewriting {
     q2 = "select * from (cube select name from table) a full outer join"
       + " (cube select name2 from table2) b on a.name=b.name2";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
@@ -283,7 +285,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table) a join (select name2 from table2) b";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -291,7 +293,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table union all cube select name2 from table2) u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 2);
@@ -301,7 +303,7 @@ public class TestRewriting {
     q2 = "insert overwrite directory '/tmp/rewrite' "
       + "select * from (cube select name from table union all cube select name2 from table2) u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 2);
@@ -310,7 +312,7 @@ public class TestRewriting {
 
     q2 = "select u.* from (select name from table    union all       cube select name2 from table2)   u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -318,7 +320,7 @@ public class TestRewriting {
 
     q2 = "select u.* from (select name from table union all cube select name2 from table2)u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
@@ -327,7 +329,7 @@ public class TestRewriting {
     q2 = "select * from (cube select name from table union all cube select name2"
       + " from table2 union all cube select name3 from table3) u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 3);
@@ -338,7 +340,7 @@ public class TestRewriting {
     q2 = "select * from   (     cube select name from table    union all   cube"
       + " select name2 from table2   union all  cube select name3 from table3 )  u";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 3);
@@ -348,7 +350,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table union all cube select" + " name2 from table2) u group by u.name";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
@@ -357,7 +359,7 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table union all cube select" + " name2 from table2)  u group by u.name";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 2);
@@ -366,7 +368,7 @@ public class TestRewriting {
 
     q2 = "create table temp1 as cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 1);
@@ -375,7 +377,7 @@ public class TestRewriting {
     q2 = "create table temp1 as select * from (cube select name from table union all cube select"
       + " name2 from table2)  u group by u.name";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 2);
@@ -385,7 +387,7 @@ public class TestRewriting {
     q2 = "create table temp1 as cube select name from table where"
       + " time_range_in('dt', '2014-06-24-23', '2014-06-25-00')";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
-    cubeQueries = RewriteUtil.findCubePositions(q2);
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     RewriteUtil.rewriteQuery(ctx);
     Assert.assertEquals(cubeQueries.size(), 1);
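
Both parser entry points exercised here now take the conf explicitly rather than constructing a HiveConf internally. A sketch of the new call shapes, inside a test method that declares throws Exception and uses the hconf field added above:

    // parseHQL gets plain HQL (the tests strip the leading "cube ");
    // findCubePositions gets the full cube query text.
    ASTNode ast = HQLParser.parseHQL("select name from table", hconf);
    List<RewriteUtil.CubeQueryInfo> cubeQueries =
        RewriteUtil.findCubePositions("cube select name from table", hconf);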