Posted to commits@hive.apache.org by ai...@apache.org on 2016/04/25 22:32:13 UTC

[1/2] hive git commit: HIVE-13424: Refactoring the code to pass a QueryState object rather than HiveConf object (Reviewed by Sergey Shelukhin)

Repository: hive
Updated Branches:
  refs/heads/master 86bdcbcd3 -> caa3ec761
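
The new org.apache.hadoop.hive.ql.QueryState class itself does not appear in this part of the patch, so as orientation here is a minimal sketch of its surface as implied by the call sites in the hunks below. The constructor behavior, field names, and the confOverlay handling are assumptions inferred from usage (new QueryState(null), new QueryState(new HiveConf(ExecDriver.class)), and new QueryState(parentSession.getHiveConf(), confOverlay, runAsync)), not the committed code:

package org.apache.hadoop.hive.ql;

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.plan.HiveOperation;

// Minimal sketch only; inferred from the call sites in this patch.
public class QueryState {
  private final HiveConf queryConf;   // per-query configuration
  private HiveOperation commandType;  // moved here from SessionState

  public QueryState(HiveConf conf) {
    this(conf, null, false);
  }

  public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
    // Assumption: a fresh HiveConf is materialized when null is passed,
    // since the tests call new QueryState(null) and then use getConf().
    this.queryConf = (conf == null) ? new HiveConf() : new HiveConf(conf);
    if (confOverlay != null) {
      for (Map.Entry<String, String> entry : confOverlay.entrySet()) {
        queryConf.set(entry.getKey(), entry.getValue());
      }
    }
    // runAsync presumably drives per-operation query-id generation for
    // HiveServer2 (see the Operation.java hunk below); omitted here, as are
    // the query-string and query-id accessors removed from SessionState,
    // which presumably migrate to this class as well.
  }

  public HiveConf getConf() {
    return queryConf;
  }

  public HiveOperation getHiveOperation() {
    return commandType;
  }

  public void setCommandType(HiveOperation commandType) {
    this.commandType = commandType;
  }

  public String getCommandType() {
    return (commandType == null) ? null : commandType.getOperationName();
  }
}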


http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 93b7a66..c13a404 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -190,7 +190,7 @@ public class ProcessAnalyzeTable implements NodeProcessor {
     // partial scan task
     DriverContext driverCxt = new DriverContext();
     Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf());
-    partialScanTask.initialize(parseContext.getConf(), null, driverCxt,
+    partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt,
         tableScan.getCompilationOpContext());
     partialScanTask.setWork(scanWork);
     statsWork.setSourceTask(partialScanTask);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 9af7749..11fd2c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -72,6 +72,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -340,8 +341,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     int nextNum;
   }
 
-  public SemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public SemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
     opToPartPruner = new HashMap<TableScanOperator, ExprNodeDesc>();
     opToPartList = new HashMap<TableScanOperator, PrunedPartitionList>();
     opToSamplePruner = new HashMap<TableScanOperator, SampleDesc>();
@@ -442,7 +443,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   public ParseContext getParseContext() {
     // Make sure the basic query properties are initialized
     copyInfoToQueryProperties(queryProperties);
-    return new ParseContext(conf, opToPartPruner, opToPartList, topOps,
+    return new ParseContext(queryState, opToPartPruner, opToPartList, topOps,
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
@@ -1197,18 +1198,18 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     createTable.addChild(temporary);
     createTable.addChild(cte.cteNode);
 
-    SemanticAnalyzer analyzer = new SemanticAnalyzer(conf);
+    SemanticAnalyzer analyzer = new SemanticAnalyzer(queryState);
     analyzer.initCtx(ctx);
     analyzer.init(false);
 
     // should share cte contexts
     analyzer.aliasToCTEs.putAll(aliasToCTEs);
 
-    HiveOperation operation = SessionState.get().getHiveOperation();
+    HiveOperation operation = queryState.getHiveOperation();
     try {
       analyzer.analyzeInternal(createTable);
     } finally {
-      SessionState.get().setCommandType(operation);
+      queryState.setCommandType(operation);
     }
 
     Table table = analyzer.tableDesc.toTable(conf);
@@ -6977,7 +6978,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     if (ltd != null && SessionState.get() != null) {
       SessionState.get().getLineageState()
           .mapDirToFop(ltd.getSourcePath(), (FileSinkOperator) output);
-    } else if ( SessionState.get().getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) {
+    } else if ( queryState.getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) {
 
       Path tlocation = null;
       String tName = Utilities.getDbTableName(tableDesc.getTableName())[1];
@@ -9340,7 +9341,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
             limit.intValue(), extraMRStep);
         qb.getParseInfo().setOuterQueryLimit(limit.intValue());
       }
-      if (!SessionState.get().getHiveOperation().equals(HiveOperation.CREATEVIEW)) {
+      if (!queryState.getHiveOperation().equals(HiveOperation.CREATEVIEW)) {
         curr = genFileSinkPlan(dest, qb, curr);
       }
     }
@@ -10345,7 +10346,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         return true;
       }
     } else {
-      SessionState.get().setCommandType(HiveOperation.QUERY);
+      queryState.setCommandType(HiveOperation.QUERY);
     }
 
     return false;
@@ -10543,14 +10544,14 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         return false;
       }
     } else {
-      SessionState.get().setCommandType(HiveOperation.QUERY);
+      queryState.setCommandType(HiveOperation.QUERY);
     }
 
     // 3. analyze create view command
     if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW
         || (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) {
       child = analyzeCreateView(ast, qb);
-      SessionState.get().setCommandType(HiveOperation.CREATEVIEW);
+      queryState.setCommandType(HiveOperation.CREATEVIEW);
       if (child == null) {
         return false;
       }
@@ -10579,7 +10580,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           throw new IllegalStateException(SemanticAnalyzerFactory.getOperation(ast.getToken().getType()) +
             " is not supported yet.");
         }
-        SessionState.get().setCommandType(SemanticAnalyzerFactory.getOperation(ast.getToken().getType()));
+        queryState.setCommandType(SemanticAnalyzerFactory.getOperation(ast.getToken().getType()));
         return false;
     }
 
@@ -10677,7 +10678,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // 4. Generate Parse Context for Optimizer & Physical compiler
     copyInfoToQueryProperties(queryProperties);
-    ParseContext pCtx = new ParseContext(conf, opToPartPruner, opToPartList, topOps,
+    ParseContext pCtx = new ParseContext(queryState, opToPartPruner, opToPartList, topOps,
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
@@ -10757,7 +10758,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     // TEZ..)
     if (!ctx.getExplainLogical()) {
       TaskCompiler compiler = TaskCompilerFactory.getCompiler(conf, pCtx);
-      compiler.init(conf, console, db);
+      compiler.init(queryState, console, db);
       compiler.compile(pCtx, rootTasks, inputs, outputs);
       fetchTask = pCtx.getFetchTask();
     }
@@ -11520,7 +11521,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       crtTblDesc.validate(conf);
       // outputs is empty, which means this create table happens in the current
       // database.
-      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
+      queryState.setCommandType(HiveOperation.CREATETABLE);
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           crtTblDesc), conf));
       break;
@@ -11539,7 +11540,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location,
           storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           likeTableName, isUserStorageFormat);
-      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
+      queryState.setCommandType(HiveOperation.CREATETABLE);
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           crtTblLikeDesc), conf));
       break;
@@ -11613,7 +11614,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       tableDesc.setNullFormat(rowFormatParams.nullFormat);
       qb.setTableDesc(tableDesc);
 
-      SessionState.get().setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
+      queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
 
       return selectStmt;
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 07ca409..fb8a33c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.parse;
 
 import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -158,24 +160,24 @@ public final class SemanticAnalyzerFactory {
         HiveOperation.ALTERTABLE_UPDATEPARTSTATS});
   }
 
-  public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
+  public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree)
       throws SemanticException {
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
     } else {
-      setSessionCommandType(commandType.get(tree.getType()));
-
+      HiveOperation opType = commandType.get(tree.getType());
+      queryState.setCommandType(opType);
       switch (tree.getType()) {
       case HiveParser.TOK_EXPLAIN:
-        return new ExplainSemanticAnalyzer(conf);
+        return new ExplainSemanticAnalyzer(queryState);
       case HiveParser.TOK_EXPLAIN_SQ_REWRITE:
-        return new ExplainSQRewriteSemanticAnalyzer(conf);
+        return new ExplainSQRewriteSemanticAnalyzer(queryState);
       case HiveParser.TOK_LOAD:
-        return new LoadSemanticAnalyzer(conf);
+        return new LoadSemanticAnalyzer(queryState);
       case HiveParser.TOK_EXPORT:
-        return new ExportSemanticAnalyzer(conf);
+        return new ExportSemanticAnalyzer(queryState);
       case HiveParser.TOK_IMPORT:
-        return new ImportSemanticAnalyzer(conf);
+        return new ImportSemanticAnalyzer(queryState);
       case HiveParser.TOK_ALTERTABLE: {
         Tree child = tree.getChild(1);
         switch (child.getType()) {
@@ -193,13 +195,13 @@ public final class SemanticAnalyzerFactory {
           case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
           case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
           case HiveParser.TOK_ALTERTABLE_SKEWED:
-          setSessionCommandType(commandType.get(child.getType()));
-          return new DDLSemanticAnalyzer(conf);
+          queryState.setCommandType(commandType.get(child.getType()));
+          return new DDLSemanticAnalyzer(queryState);
         }
-        HiveOperation commandType =
+        opType =
             tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 1 : 0];
-        setSessionCommandType(commandType);
-        return new DDLSemanticAnalyzer(conf);
+        queryState.setCommandType(opType);
+        return new DDLSemanticAnalyzer(queryState);
       }
       case HiveParser.TOK_ALTERVIEW: {
         Tree child = tree.getChild(1);
@@ -209,13 +211,14 @@ public final class SemanticAnalyzerFactory {
           case HiveParser.TOK_ALTERVIEW_ADDPARTS:
           case HiveParser.TOK_ALTERVIEW_DROPPARTS:
           case HiveParser.TOK_ALTERVIEW_RENAME:
-            setSessionCommandType(commandType.get(child.getType()));
-            return new DDLSemanticAnalyzer(conf);
+            opType = commandType.get(child.getType());
+            queryState.setCommandType(opType);
+            return new DDLSemanticAnalyzer(queryState);
         }
         // TOK_ALTERVIEW_AS
         assert child.getType() == HiveParser.TOK_QUERY;
-        setSessionCommandType(HiveOperation.ALTERVIEW_AS);
-        return new SemanticAnalyzer(conf);
+        queryState.setCommandType(HiveOperation.ALTERVIEW_AS);
+        return new SemanticAnalyzer(queryState);
       }
       case HiveParser.TOK_CREATEDATABASE:
       case HiveParser.TOK_DROPDATABASE:
@@ -265,23 +268,23 @@ public final class SemanticAnalyzerFactory {
       case HiveParser.TOK_TRUNCATETABLE:
       case HiveParser.TOK_SHOW_SET_ROLE:
       case HiveParser.TOK_CACHE_METADATA:
-        return new DDLSemanticAnalyzer(conf);
+        return new DDLSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_CREATEFUNCTION:
       case HiveParser.TOK_DROPFUNCTION:
       case HiveParser.TOK_RELOADFUNCTION:
-        return new FunctionSemanticAnalyzer(conf);
+        return new FunctionSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_ANALYZE:
-        return new ColumnStatsSemanticAnalyzer(conf);
+        return new ColumnStatsSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_CREATEMACRO:
       case HiveParser.TOK_DROPMACRO:
-        return new MacroSemanticAnalyzer(conf);
+        return new MacroSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_UPDATE_TABLE:
       case HiveParser.TOK_DELETE_FROM:
-        return new UpdateDeleteSemanticAnalyzer(conf);
+        return new UpdateDeleteSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_START_TRANSACTION:
       case HiveParser.TOK_COMMIT:
@@ -289,20 +292,14 @@ public final class SemanticAnalyzerFactory {
       case HiveParser.TOK_SET_AUTOCOMMIT:
       default: {
         SemanticAnalyzer semAnalyzer = HiveConf
-            .getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner(conf)
-            : new SemanticAnalyzer(conf);
+            .getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
+                new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
         return semAnalyzer;
       }
       }
     }
   }
 
-  private static void setSessionCommandType(HiveOperation commandType) {
-    if (SessionState.get() != null) {
-      SessionState.get().setCommandType(commandType);
-    }
-  }
-
   private SemanticAnalyzerFactory() {
     // prevent instantiation
   }
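
Condensed, the pattern a caller now follows with the refactored factory looks like the sketch below, mirroring the tests in this patch (TestQBCompact, TestUpdateDeleteSemanticAnalyzer); the wrapper class name is hypothetical and error handling is omitted:

package org.apache.hadoop.hive.ql.parse;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SemanticAnalyzerFactoryUsageSketch {
  public static BaseSemanticAnalyzer analyze(String command) throws Exception {
    QueryState queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
    SessionState.start(queryState.getConf());

    // Parse and hand the root node plus the per-query state to the factory.
    ASTNode tree = (ASTNode) new ParseDriver().parse(command).getChild(0);
    BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.get(queryState, tree);
    analyzer.analyze(tree, new Context(queryState.getConf()));

    // The resolved command type is now recorded on the QueryState rather
    // than on the thread-local SessionState.
    HiveOperation op = queryState.getHiveOperation();
    return analyzer;
  }
}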

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 75ca5f9..7efc987 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ColumnStatsTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.StatsTask;
@@ -82,10 +83,12 @@ public abstract class TaskCompiler {
   // Assumes one instance of this + single-threaded compilation for each query.
   protected Hive db;
   protected LogHelper console;
+  protected QueryState queryState;
   protected HiveConf conf;
 
-  public void init(HiveConf conf, LogHelper console, Hive db) {
-    this.conf = conf;
+  public void init(QueryState queryState, LogHelper console, Hive db) {
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.db = db;
     this.console = console;
   }
@@ -447,7 +450,7 @@ public abstract class TaskCompiler {
    * Create a clone of the parse context
    */
   public ParseContext getParseContext(ParseContext pCtx, List<Task<? extends Serializable>> rootTasks) {
-    ParseContext clone = new ParseContext(conf,
+    ParseContext clone = new ParseContext(queryState,
         pCtx.getOpToPartPruner(), pCtx.getOpToPartList(), pCtx.getTopOps(),
         pCtx.getJoinOps(), pCtx.getSmbMapJoinOps(),
         pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(),

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index 8e67ce8..cf7a875 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -35,6 +35,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
 import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
@@ -101,8 +102,8 @@ public class TezCompiler extends TaskCompiler {
   }
 
   @Override
-  public void init(HiveConf conf, LogHelper console, Hive db) {
-    super.init(conf, console, db);
+  public void init(QueryState queryState, LogHelper console, Hive db) {
+    super.init(queryState, console, db);
 
     // Tez requires us to use RPC for the query plan
     HiveConf.setBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN, true);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 5b4365c..b8771d2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -52,8 +53,8 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
 
   boolean useSuper = false;
 
-  public UpdateDeleteSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public UpdateDeleteSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index 80ccb28..52186b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -187,7 +187,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
 
     @SuppressWarnings("unchecked")
     Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf());
-    partialScanTask.initialize(parseContext.getConf(), null, driverCxt,
+    partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt,
         tableScan.getCompilationOpContext());
     partialScanTask.setWork(scanWork);
     statsWork.setSourceTask(partialScanTask);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 344dd34..672df63 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -122,7 +122,7 @@ public class SessionState {
   /**
    * current configuration.
    */
-  private final HiveConf conf;
+  private final HiveConf sessionConf;
 
   /**
    * silent mode.
@@ -135,7 +135,7 @@ public class SessionState {
   protected boolean isVerbose;
 
   /**
-   * Is the query served from HiveServer2
+   * The flag to indicate if the session serves the queries from HiveServer2 or not.
    */
   private boolean isHiveServerQuery = false;
 
@@ -171,11 +171,6 @@ public class SessionState {
    */
   protected File tmpErrOutputFile;
 
-  /**
-   * type of the command.
-   */
-  private HiveOperation commandType;
-
   private String lastCommand;
 
   private HiveAuthorizationProvider authorizer;
@@ -287,7 +282,7 @@ public class SessionState {
   }
 
   public HiveConf getConf() {
-    return conf;
+    return sessionConf;
   }
 
 
@@ -316,8 +311,8 @@ public class SessionState {
   }
 
   public boolean getIsSilent() {
-    if(conf != null) {
-      return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
+    if(sessionConf != null) {
+      return sessionConf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
     } else {
       return isSilent;
     }
@@ -328,8 +323,8 @@ public class SessionState {
   }
 
   public void setIsSilent(boolean isSilent) {
-    if(conf != null) {
-      conf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent);
+    if(sessionConf != null) {
+      sessionConf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent);
     }
     this.isSilent = isSilent;
   }
@@ -355,7 +350,7 @@ public class SessionState {
   }
 
   public SessionState(HiveConf conf, String userName) {
-    this.conf = conf;
+    this.sessionConf = conf;
     this.userName = userName;
     if (LOG.isDebugEnabled()) {
       LOG.debug("SessionState user: " + userName);
@@ -375,23 +370,11 @@ public class SessionState {
     parentLoader = SessionState.class.getClassLoader();
     // Make sure that each session has its own UDFClassloader. For details see {@link UDFClassLoader}
     final ClassLoader currentLoader = Utilities.createUDFClassLoader((URLClassLoader) parentLoader, new String[]{});
-    this.conf.setClassLoader(currentLoader);
+    this.sessionConf.setClassLoader(currentLoader);
     resourceDownloader = new ResourceDownloader(conf,
         HiveConf.getVar(conf, ConfVars.DOWNLOADED_RESOURCES_DIR));
   }
 
-  public void setCmd(String cmdString) {
-    conf.setQueryString(cmdString);
-  }
-
-  public String getCmd() {
-    return (conf.getQueryString());
-  }
-
-  public String getQueryId() {
-    return (conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
-  }
-
   public Map<String, String> getHiveVariables() {
     if (hiveVariables == null) {
       hiveVariables = new HashMap<String, String>();
@@ -404,7 +387,7 @@ public class SessionState {
   }
 
   public String getSessionId() {
-    return (conf.getVar(HiveConf.ConfVars.HIVESESSIONID));
+    return (sessionConf.getVar(HiveConf.ConfVars.HIVESESSIONID));
   }
 
   public void updateThreadName() {
@@ -457,9 +440,9 @@ public class SessionState {
   public HadoopShims.HdfsEncryptionShim getHdfsEncryptionShim() throws HiveException {
     if (hdfsEncryptionShim == null) {
       try {
-        FileSystem fs = FileSystem.get(conf);
+        FileSystem fs = FileSystem.get(sessionConf);
         if ("hdfs".equals(fs.getUri().getScheme())) {
-          hdfsEncryptionShim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
+          hdfsEncryptionShim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, sessionConf);
         } else {
           LOG.info("Could not get hdfsEncryptionShim, it is only applicable to hdfs filesystem.");
         }
@@ -557,9 +540,9 @@ public class SessionState {
       // Hive object instance should be created with a copy of the conf object. If the conf is
       // shared with SessionState, other parts of the code might update the config, but
       // Hive.get(HiveConf) would not recognize the case when it needs refreshing
-      Hive.get(new HiveConf(startSs.conf)).getMSC();
+      Hive.get(new HiveConf(startSs.sessionConf)).getMSC();
       UserGroupInformation sessionUGI = Utils.getUGI();
-      FileSystem.get(startSs.conf);
+      FileSystem.get(startSs.sessionConf);
 
       // Create scratch dirs for this session
       startSs.createSessionDirs(sessionUGI.getShortUserName());
@@ -611,9 +594,9 @@ public class SessionState {
       }
       // Neither open nor opening.
       if (!isAsync) {
-        startSs.tezSessionState.open(startSs.conf); // should use conf on session start-up
+        startSs.tezSessionState.open(startSs.sessionConf); // should use conf on session start-up
       } else {
-        startSs.tezSessionState.beginOpen(startSs.conf, null, console);
+        startSs.tezSessionState.beginOpen(startSs.sessionConf, null, console);
       }
     } catch (Exception e) {
       throw new RuntimeException(e);
@@ -717,7 +700,7 @@ public class SessionState {
    * @return
    * @throws IOException
    */
-  private void createPath(HiveConf conf, Path path, String permission, boolean isLocal,
+  private static void createPath(HiveConf conf, Path path, String permission, boolean isLocal,
       boolean isCleanUp) throws IOException {
     FsPermission fsPermission = new FsPermission(permission);
     FileSystem fs;
@@ -817,18 +800,18 @@ public class SessionState {
     }
 
     try {
-      authenticator = HiveUtils.getAuthenticator(conf,
+      authenticator = HiveUtils.getAuthenticator(sessionConf,
           HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
       authenticator.setSessionState(this);
 
-      String clsStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
-      authorizer = HiveUtils.getAuthorizeProviderManager(conf,
+      String clsStr = HiveConf.getVar(sessionConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+      authorizer = HiveUtils.getAuthorizeProviderManager(sessionConf,
           clsStr, authenticator, true);
 
       if (authorizer == null) {
         // if it was null, the new (V2) authorization plugin must be specified in
         // config
-        HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(conf,
+        HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(sessionConf,
             HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
 
         HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
@@ -837,12 +820,12 @@ public class SessionState {
         authzContextBuilder.setSessionString(getSessionId());
 
         authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
-            conf, authenticator, authzContextBuilder.build());
+            sessionConf, authenticator, authzContextBuilder.build());
         setAuthorizerV2Config();
 
       }
       // create the create table grants with new config
-      createTableGrants = CreateTableAutomaticGrant.create(conf);
+      createTableGrants = CreateTableAutomaticGrant.create(sessionConf);
 
     } catch (HiveException e) {
       LOG.error("Error setting up authorization: " + e.getMessage(), e);
@@ -858,23 +841,23 @@ public class SessionState {
 
   private void setAuthorizerV2Config() throws HiveException {
     // avoid processing the same config multiple times, check marker
-    if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
+    if (sessionConf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
       return;
     }
-    String metastoreHook = conf.get(ConfVars.METASTORE_FILTER_HOOK.name());
+    String metastoreHook = sessionConf.get(ConfVars.METASTORE_FILTER_HOOK.name());
     if (!ConfVars.METASTORE_FILTER_HOOK.getDefaultValue().equals(metastoreHook) &&
         !AuthorizationMetaStoreFilterHook.class.getName().equals(metastoreHook)) {
       LOG.warn(ConfVars.METASTORE_FILTER_HOOK.name() +
           " will be ignored, since hive.security.authorization.manager" +
           " is set to instance of HiveAuthorizerFactory.");
     }
-    conf.setVar(ConfVars.METASTORE_FILTER_HOOK,
+    sessionConf.setVar(ConfVars.METASTORE_FILTER_HOOK,
         AuthorizationMetaStoreFilterHook.class.getName());
 
-    authorizerV2.applyAuthorizationConfigPolicy(conf);
+    authorizerV2.applyAuthorizationConfigPolicy(sessionConf);
     // update config in Hive thread local as well and init the metastore client
     try {
-      Hive.get(conf).getMSC();
+      Hive.get(sessionConf).getMSC();
     } catch (Exception e) {
       // catch-all due to some exec time dependencies on session state
       // that would cause ClassNoFoundException otherwise
@@ -882,7 +865,7 @@ public class SessionState {
     }
 
     // set a marker that this conf has been processed.
-    conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
+    sessionConf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
   }
 
   public Object getActiveAuthorizer() {
@@ -1124,7 +1107,7 @@ public class SessionState {
   public void reloadAuxJars() throws IOException {
     final Set<String> reloadedAuxJars = new HashSet<String>();
 
-    final String renewableJarPath = conf.getVar(ConfVars.HIVERELOADABLEJARS);
+    final String renewableJarPath = sessionConf.getVar(ConfVars.HIVERELOADABLEJARS);
     // do nothing if this property is not specified or empty
     if (renewableJarPath == null || renewableJarPath.isEmpty()) {
       return;
@@ -1154,7 +1137,7 @@ public class SessionState {
         currentCLoader =
             (URLClassLoader) Utilities.addToClassPath(currentCLoader,
                 reloadedAuxJars.toArray(new String[0]));
-        conf.setClassLoader(currentCLoader);
+        sessionConf.setClassLoader(currentCLoader);
         Thread.currentThread().setContextClassLoader(currentCLoader);
       }
       preReloadableAuxJars.clear();
@@ -1394,21 +1377,6 @@ public class SessionState {
     }
   }
 
-  public String getCommandType() {
-    if (commandType == null) {
-      return null;
-    }
-    return commandType.getOperationName();
-  }
-
-  public HiveOperation getHiveOperation() {
-    return commandType;
-  }
-
-  public void setCommandType(HiveOperation commandType) {
-    this.commandType = commandType;
-  }
-
   public HiveAuthorizationProvider getAuthorizer() {
     setupAuth();
     return authorizer;
@@ -1499,7 +1467,7 @@ public class SessionState {
   public void close() throws IOException {
     registry.clear();
     if (txnMgr != null) txnMgr.closeTxnManager();
-    JavaUtils.closeClassLoadersTo(conf.getClassLoader(), parentLoader);
+    JavaUtils.closeClassLoadersTo(sessionConf.getClassLoader(), parentLoader);
     File resourceDir =
         new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
     LOG.debug("Removing resource dir " + resourceDir);
@@ -1526,7 +1494,7 @@ public class SessionState {
     try {
       closeSparkSession();
       registry.closeCUDFLoaders();
-      dropSessionPaths(conf);
+      dropSessionPaths(sessionConf);
       unCacheDataNucleusClassLoaders();
     } finally {
       // removes the threadlocal variables, closes underlying HMS connection
@@ -1536,10 +1504,10 @@ public class SessionState {
 
   private void unCacheDataNucleusClassLoaders() {
     try {
-      Hive threadLocalHive = Hive.get(conf);
+      Hive threadLocalHive = Hive.get(sessionConf);
       if ((threadLocalHive != null) && (threadLocalHive.getMSC() != null)
           && (threadLocalHive.getMSC().isLocalMetaStore())) {
-        if (conf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL).equals(ObjectStore.class.getName())) {
+        if (sessionConf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL).equals(ObjectStore.class.getName())) {
           ObjectStore.unCacheDataNucleusClassLoaders();
         }
       }
@@ -1667,9 +1635,9 @@ public class SessionState {
     queryCurrentTimestamp = new Timestamp(System.currentTimeMillis());
 
     // Provide a facility to set current timestamp during tests
-    if (conf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
+    if (sessionConf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
       String overrideTimestampString =
-          HiveConf.getVar(conf, HiveConf.ConfVars.HIVETESTCURRENTTIMESTAMP, (String)null);
+          HiveConf.getVar(sessionConf, HiveConf.ConfVars.HIVETESTCURRENTTIMESTAMP, (String)null);
       if (overrideTimestampString != null && overrideTimestampString.length() > 0) {
         queryCurrentTimestamp = Timestamp.valueOf(overrideTimestampString);
       }
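
The command-type accessors deleted from SessionState above are the counterpart of the factory change: per-query command state moves off the session singleton. The mechanical substitution applied throughout this patch is the following, where queryState is the field BaseSemanticAnalyzer now appears to expose to its subclasses (see the SemanticAnalyzer hunks above):

  // Before: session-scoped, shared by every query on the session.
  SessionState.get().setCommandType(HiveOperation.QUERY);
  String before = SessionState.get().getCommandType();

  // After: scoped to the query being compiled.
  queryState.setCommandType(HiveOperation.QUERY);
  String after = queryState.getCommandType();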

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index eaeb66b..667d5c2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
@@ -71,6 +72,7 @@ import org.apache.hadoop.util.Shell;
  */
 public class TestExecDriver extends TestCase {
 
+  static QueryState queryState;
   static HiveConf conf;
 
   private static final String tmpdir;
@@ -82,7 +84,8 @@ public class TestExecDriver extends TestCase {
 
   static {
     try {
-      conf = new HiveConf(ExecDriver.class);
+      queryState = new QueryState(new HiveConf(ExecDriver.class));
+      conf = queryState.getConf();
       conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
       conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);
 
@@ -480,7 +483,7 @@ public class TestExecDriver extends TestCase {
     MapRedTask mrtask = new MapRedTask();
     DriverContext dctx = new DriverContext ();
     mrtask.setWork(mr);
-    mrtask.initialize(conf, null, dctx, null);
+    mrtask.initialize(queryState, null, dctx, null);
     int exitVal =  mrtask.execute(dctx);
 
     if (exitVal != 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index 9d7166c..c659806 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -25,6 +25,7 @@ import junit.framework.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -36,16 +37,18 @@ public class TestMacroSemanticAnalyzer {
 
   private ParseDriver parseDriver;
   private MacroSemanticAnalyzer analyzer;
+  private QueryState queryState;
   private HiveConf conf;
   private Context context;
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    conf = queryState.getConf();
     SessionState.start(conf);
     context = new Context(conf);
     parseDriver = new ParseDriver();
-    analyzer = new MacroSemanticAnalyzer(conf);
+    analyzer = new MacroSemanticAnalyzer(queryState);
   }
 
   private ASTNode parse(String command) throws Exception {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index ccdf272..8fe4d02 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -18,9 +18,11 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import junit.framework.Assert;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
@@ -42,11 +44,13 @@ import java.util.Map;
  * Tests for parsing and semantic analysis of ALTER TABLE ... compact.
  */
 public class TestQBCompact {
+  static QueryState queryState;
   static HiveConf conf;
 
   @BeforeClass
   public static void init() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    conf = queryState.getConf();
     SessionState.start(conf);
 
     // Create a table so we can work against it
@@ -65,7 +69,7 @@ public class TestQBCompact {
   private AlterTableSimpleDesc parseAndAnalyze(String query) throws Exception {
     ParseDriver hd = new ParseDriver();
     ASTNode head = (ASTNode)hd.parse(query).getChild(0);
-    BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(conf, head);
+    BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
     a.analyze(head, new Context(conf));
     List<Task<? extends Serializable>> roots = a.getRootTasks();
     Assert.assertEquals(1, roots.size());

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
index 70d86c1..e607f10 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
@@ -23,6 +23,7 @@ import java.util.List;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -30,19 +31,21 @@ import org.junit.Test;
 
 public class TestQBJoinTreeApplyPredicate {
 
+  static QueryState queryState;
   static HiveConf conf;
 
   SemanticAnalyzer sA;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
+    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    conf = queryState.getConf();
     SessionState.start(conf);
   }
 
   @Before
   public void setup() throws SemanticException {
-    sA = new CalcitePlanner(conf);
+    sA = new CalcitePlanner(queryState);
   }
 
   static ASTNode constructIdentifier(String nm) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index f6f0abb..f9db2c8 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
 import org.junit.Before;
@@ -29,6 +30,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestQBSubQuery {
+  static QueryState queryState;
   static HiveConf conf;
 
   private static String IN_QUERY = " select * " +
@@ -46,14 +48,15 @@ public class TestQBSubQuery {
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
+    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    conf = queryState.getConf();
     SessionState.start(conf);
   }
 
   @Before
   public void setup() throws SemanticException {
     pd = new ParseDriver();
-    sA = new CalcitePlanner(conf);
+    sA = new CalcitePlanner(queryState);
   }
 
   ASTNode parse(String query) throws ParseException {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
index 4a52efb..5849950 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
@@ -21,27 +21,30 @@ import junit.framework.Assert;
 
 import org.antlr.runtime.CommonToken;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.junit.Before;
 import org.junit.Test;
 
 public class TestSemanticAnalyzerFactory {
 
+  private QueryState queryState;
   private HiveConf conf;
   
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    conf = queryState.getConf();
   }
   @Test
   public void testCreate() throws Exception {
     BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
-        get(conf, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO)));
+        get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO)));
     Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
   }
   @Test
   public void testDrop() throws Exception {
     BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
-        get(conf, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO)));
+        get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO)));
     Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index 8614d40..78f5ea9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
@@ -47,6 +48,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
 
   static final private Logger LOG = LoggerFactory.getLogger(TestUpdateDeleteSemanticAnalyzer.class.getName());
 
+  private QueryState queryState;
   private HiveConf conf;
   private Hive db;
 
@@ -221,7 +223,8 @@ public class TestUpdateDeleteSemanticAnalyzer {
 
   @Before
   public void setup() {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    conf = queryState.getConf();
     conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
     conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
     conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
@@ -256,7 +259,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
     ASTNode tree = pd.parse(query, ctx);
     tree = ParseUtils.findRootNonNullToken(tree);
 
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
     SessionState.get().initTxnMgr(conf);
     db = sem.getDb();
 
@@ -295,7 +298,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
         sem.getFetchTask(), sem, true, false, false, false, false, false, null);
     ExplainTask task = new ExplainTask();
     task.setWork(work);
-    task.initialize(conf, plan, null, null);
+    task.initialize(queryState, plan, null, null);
     task.execute(null);
     FSDataInputStream in = fs.open(tmp);
     StringBuilder builder = new StringBuilder();

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
index 2e85a2a..e8e29ee 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
@@ -22,8 +22,8 @@ import java.util.List;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -46,10 +46,10 @@ public class AuthorizationTestUtil {
    * @return
    * @throws Exception
    */
-  public static DDLWork analyze(ASTNode ast, HiveConf conf, Hive db) throws Exception {
-    DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(conf, db);
-    SessionState.start(conf);
-    analyzer.analyze(ast, new Context(conf));
+  public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
+    DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
+    SessionState.start(queryState.getConf());
+    analyzer.analyze(ast, new Context(queryState.getConf()));
     List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
     return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
   }
@@ -62,8 +62,8 @@ public class AuthorizationTestUtil {
    * @return
    * @throws Exception
    */
-  public static DDLWork analyze(String command, HiveConf conf, Hive db) throws Exception {
-    return analyze(parse(command), conf, db);
+  public static DDLWork analyze(String command, QueryState queryState, Hive db) throws Exception {
+    return analyze(parse(command), queryState, db);
   }
 
   private static ASTNode parse(String command) throws Exception {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
index e3b82f3..b324dfd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
@@ -21,6 +21,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.GrantDesc;
@@ -34,9 +35,9 @@ public class PrivilegesTestBase {
   protected static final String TABLE_QNAME = DB + "." + TABLE;
   protected static final String USER = "user1";
 
-  public static void grantUserTable(String privStr, PrivilegeType privType, HiveConf conf, Hive db)
+  public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db)
       throws Exception {
-    DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, conf, db);
+    DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db);
     GrantDesc grantDesc = work.getGrantDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 038e5fd..bfacbd0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -19,12 +19,13 @@ package org.apache.hadoop.hive.ql.parse.authorization;
 
 import java.util.HashMap;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -89,7 +90,7 @@ public class TestHiveAuthorizationTaskFactory {
 
   private ParseDriver parseDriver;
   private DDLSemanticAnalyzer analyzer;
-  private HiveConf conf;
+  private QueryState queryState;
   private Context context;
   private String currentUser;
   private Hive db;
@@ -98,7 +99,8 @@ public class TestHiveAuthorizationTaskFactory {
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    HiveConf conf = queryState.getConf();
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
         TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
     db = Mockito.mock(Hive.class);
@@ -107,7 +109,7 @@ public class TestHiveAuthorizationTaskFactory {
     SessionState.start(conf);
     context = new Context(conf);
     parseDriver = new ParseDriver();
-    analyzer = new DDLSemanticAnalyzer(conf, db);
+    analyzer = new DDLSemanticAnalyzer(queryState, db);
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
@@ -478,7 +480,7 @@ public class TestHiveAuthorizationTaskFactory {
   }
 
   private DDLWork analyze(String command) throws Exception {
-    return AuthorizationTestUtil.analyze(command, conf, db);
+    return AuthorizationTestUtil.analyze(command, queryState, db);
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index c614630..41584af 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -22,6 +22,7 @@ import java.util.HashMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -33,18 +34,18 @@ import org.mockito.Mockito;
 
 public class TestPrivilegesV1 extends PrivilegesTestBase{
 
-  private HiveConf conf;
+  private QueryState queryState;
   private Hive db;
   private Table table;
   private Partition partition;
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
     db = Mockito.mock(Hive.class);
     table = new Table(DB, TABLE);
     partition = new Partition(table);
-    SessionState.start(conf);
+    SessionState.start(queryState.getConf());
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
@@ -81,6 +82,6 @@ public class TestPrivilegesV1 extends PrivilegesTestBase{
   }
 
   private void grantUserTable(String privName, PrivilegeType privType) throws Exception {
-    grantUserTable(privName, privType, conf, db);
+    grantUserTable(privName, privType, queryState, db);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
index 7b28375..c552ba7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.parse.authorization;
 import java.util.HashMap;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -32,15 +33,16 @@ import org.mockito.Mockito;
 
 public class TestPrivilegesV2 extends PrivilegesTestBase{
 
-  private HiveConf conf;
+  private QueryState queryState;
   private Hive db;
   private Table table;
   private Partition partition;
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
     //set authorization mode to V2
+    HiveConf conf = queryState.getConf();
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         SQLStdHiveAuthorizerFactory.class.getName());
     db = Mockito.mock(Hive.class);
@@ -67,7 +69,7 @@ public class TestPrivilegesV2 extends PrivilegesTestBase{
   }
 
   private void grantUserTable(String privName, PrivilegeType privType) throws Exception {
-    grantUserTable(privName, privType, conf, db);
+    grantUserTable(privName, privType, queryState, db);
   }
 
 }
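
Both privilege tests above migrate with the same recipe: build a QueryState, pull the HiveConf back out of it, apply any test-specific settings to that conf, and start the SessionState on it. A condensed sketch of the migrated setup (the class name is illustrative; the calls are the ones used in the tests above):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.junit.Before;

    public class QueryStateTestSetup {
      private QueryState queryState;

      @Before
      public void setup() throws Exception {
        // Passing null makes QueryState create a fresh HiveConf internally.
        queryState = new QueryState(null);
        HiveConf conf = queryState.getConf();
        // Test-specific settings go on the query-scoped conf, exactly as
        // TestPrivilegesV2 does for the authorization manager above...
        // ...and SessionState is started on that same conf.
        SessionState.start(conf);
      }
    }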

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/service/src/java/org/apache/hive/service/cli/operation/Operation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 4f3e9c2..63b1a48 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.OperationLog;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -62,7 +62,6 @@ public abstract class Operation {
   private volatile OperationState state = OperationState.INITIALIZED;
   private volatile MetricsScope currentStateScope;
   private final OperationHandle opHandle;
-  private HiveConf configuration;
   public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT;
   public static final Logger LOG = LoggerFactory.getLogger(Operation.class.getName());
   public static final long DEFAULT_FETCH_MAX_ROWS = 100;
@@ -81,13 +80,13 @@ public abstract class Operation {
   protected long operationStart;
   protected long operationComplete;
 
+  protected final QueryState queryState;
+
   protected static final EnumSet<FetchOrientation> DEFAULT_FETCH_ORIENTATION_SET =
       EnumSet.of(FetchOrientation.FETCH_NEXT,FetchOrientation.FETCH_FIRST);
 
   protected Operation(HiveSession parentSession, OperationType opType, boolean runInBackground) {
     this(parentSession, null, opType, runInBackground);
-    // Generate a queryId for the operation if no queryId is provided
-    confOverlay.put(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
  }
 
   protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType, boolean runInBackground) {
@@ -102,6 +101,11 @@ public abstract class Operation {
     operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(),
         HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
     setMetrics(state);
+    queryState = new QueryState(parentSession.getHiveConf(), confOverlay, runAsync);
+  }
+
+  public QueryState getQueryState() {
+    return queryState;
   }
 
   public Future<?> getBackgroundHandle() {
@@ -116,13 +120,6 @@ public abstract class Operation {
     return runAsync;
   }
 
-  public void setConfiguration(HiveConf configuration) {
-    this.configuration = new HiveConf(configuration);
-  }
-
-  public HiveConf getConfiguration() {
-    return new HiveConf(configuration);
-  }
 
   public HiveSession getParentSession() {
     return parentSession;
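
With setConfiguration()/getConfiguration() gone, the operation-scoped configuration lives in the QueryState built by the constructor above, and callers reach it through getQueryState(). A minimal sketch of the replacement call pattern (the helper class is illustrative, not part of this commit):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.cli.operation.Operation;

    public final class OperationConfReader {
      private OperationConfReader() {}

      // Replaces old operation.getConfiguration() call sites: the
      // query-scoped conf is owned by the operation's QueryState.
      public static String executionEngine(Operation operation) {
        HiveConf conf = operation.getQueryState().getConf();
        return conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
      }
    }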

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index b921e6e..01dd48c 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryDisplay;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -125,10 +126,10 @@ public class SQLOperation extends ExecuteStatementOperation {
    * @param sqlOperationConf
    * @throws HiveSQLException
    */
-  public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
+  public void prepare(QueryState queryState) throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
-      driver = new Driver(sqlOperationConf, getParentSession().getUserName());
+      driver = new Driver(queryState, getParentSession().getUserName());
       sqlOpDisplay.setQueryDisplay(driver.getQueryDisplay());
 
       // set the operation handle information in Driver, so that thrift API users
@@ -181,7 +182,7 @@ public class SQLOperation extends ExecuteStatementOperation {
     }
   }
 
-  private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException {
+  private void runQuery() throws HiveSQLException {
     try {
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
@@ -213,11 +214,10 @@ public class SQLOperation extends ExecuteStatementOperation {
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.PENDING);
-    final HiveConf opConfig = getConfigForOperation();
 
-    prepare(opConfig);
+    prepare(queryState);
     if (!shouldRunAsync()) {
-      runQuery(opConfig);
+      runQuery();
     } else {
       // We'll pass ThreadLocals in the background thread from the foreground (handler) thread
       final SessionState parentSessionState = SessionState.get();
@@ -226,7 +226,7 @@ public class SQLOperation extends ExecuteStatementOperation {
       final Hive parentHive = parentSession.getSessionHive();
      // Current UGI will get used by metastore when metastore is in embedded mode
       // So this needs to get passed to the new background thread
-      final UserGroupInformation currentUGI = getCurrentUGI(opConfig);
+      final UserGroupInformation currentUGI = getCurrentUGI();
       // Runnable impl to call runInternal asynchronously,
       // from a different thread
       Runnable backgroundOperation = new Runnable() {
@@ -241,7 +241,7 @@ public class SQLOperation extends ExecuteStatementOperation {
               registerCurrentOperationLog();
               registerLoggingContext();
               try {
-                runQuery(opConfig);
+                runQuery();
               } catch (HiveSQLException e) {
                 setOperationException(e);
                 LOG.error("Error running hive query: ", e);
@@ -292,7 +292,7 @@ public class SQLOperation extends ExecuteStatementOperation {
    * @return UserGroupInformation
    * @throws HiveSQLException
    */
-  private UserGroupInformation getCurrentUGI(HiveConf opConfig) throws HiveSQLException {
+  private UserGroupInformation getCurrentUGI() throws HiveSQLException {
     try {
       return Utils.getUGI();
     } catch (Exception e) {
@@ -361,14 +361,13 @@ public class SQLOperation extends ExecuteStatementOperation {
   public RowSet getNextRowSet(FetchOrientation orientation, long maxRows)
     throws HiveSQLException {
 
-    HiveConf hiveConf = getConfigForOperation();
     validateDefaultFetchOrientation(orientation);
     assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
 
     FetchTask fetchTask = driver.getFetchTask();
     boolean isBlobBased = false;
 
-    if (fetchTask != null && fetchTask.getWork().isHiveServerQuery() && HiveConf.getBoolVar(hiveConf,
+    if (fetchTask != null && fetchTask.getWork().isHiveServerQuery() && HiveConf.getBoolVar(queryState.getConf(),
         HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS)
         && (fetchTask.getTblDesc().getSerdeClassName().equalsIgnoreCase(ThriftJDBCBinarySerDe.class
             .getName()))) {
@@ -519,34 +518,6 @@ public class SQLOperation extends ExecuteStatementOperation {
   }
 
   /**
-   * If there are query specific settings to overlay, then create a copy of config
-   * There are two cases we need to clone the session config that's being passed to hive driver
-   * 1. Async query -
-   *    If the client changes a config setting, that shouldn't reflect in the execution already underway
-   * 2. confOverlay -
-   *    The query specific settings should only be applied to the query config and not session
-   * @return new configuration
-   * @throws HiveSQLException
-   */
-  public HiveConf getConfigForOperation() throws HiveSQLException {
-    HiveConf sqlOperationConf = getParentSession().getHiveConf();
-    if (!confOverlay.isEmpty() || shouldRunAsync()) {
-      // clone the partent session config for this query
-      sqlOperationConf = new HiveConf(sqlOperationConf);
-
-      // apply overlay query specific settings, if any
-      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
-        try {
-          sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
-        } catch (IllegalArgumentException e) {
-          throw new HiveSQLException("Error applying statement specific settings", e);
-        }
-      }
-    }
-    return sqlOperationConf;
-  }
-
-  /**
    * Get summary information of this SQLOperation for display in WebUI.
    */
   public SQLOperationDisplay getSQLOperationDisplay() {
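
Note that the deleted getConfigForOperation() is not lost: its two cloning cases (a non-empty confOverlay and async execution) move into QueryState.createConf(), which appears later in this diff. A sketch of the equivalence, using only constructors introduced by this commit (the wrapper class is illustrative):

    import java.util.Map;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;

    public final class OperationConfEquivalence {
      private OperationConfEquivalence() {}

      // What getConfigForOperation() used to compute is now a side effect
      // of building the QueryState: the session conf is cloned whenever
      // the query runs async or carries a confOverlay.
      public static HiveConf confForOperation(HiveConf sessionConf,
          Map<String, String> confOverlay, boolean runAsync) {
        return new QueryState(sessionConf, confOverlay, runAsync).getConf();
      }
    }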

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
index fe93426..c521706 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
@@ -41,7 +41,7 @@ public class SQLOperationDisplay {
   public SQLOperationDisplay(SQLOperation sqlOperation) throws HiveSQLException {
     this.state = sqlOperation.getState();
     this.userName = sqlOperation.getParentSession().getUserName();
-    this.executionEngine = sqlOperation.getConfigForOperation().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
+    this.executionEngine = sqlOperation.getQueryState().getConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
     this.beginTime = System.currentTimeMillis();
     this.operationId = sqlOperation.getHandle().getHandleIdentifier().toString();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index c24f162..0cfec7a 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -87,7 +87,7 @@ public class HiveSessionImpl implements HiveSession {
   private final SessionHandle sessionHandle;
   private String username;
   private final String password;
-  private final HiveConf hiveConf;
+  private final HiveConf sessionConf;
   private final long creationTime;
   // TODO: some SessionState internals are not thread safe. The compile-time internals are synced
   //       via session-scope or global compile lock. The run-time internals work by magic!
@@ -121,24 +121,24 @@ public class HiveSessionImpl implements HiveSession {
     this.password = password;
     creationTime = System.currentTimeMillis();
     this.sessionHandle = sessionHandle != null ? sessionHandle : new SessionHandle(protocol);
-    this.hiveConf = new HiveConf(serverhiveConf);
+    this.sessionConf = new HiveConf(serverhiveConf);
     this.ipAddress = ipAddress;
     try {
       // In non-impersonation mode, map scheduler queue to current user
       // if fair scheduler is configured.
-      if (! hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) &&
-        hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_MAP_FAIR_SCHEDULER_QUEUE)) {
-        ShimLoader.getHadoopShims().refreshDefaultQueue(hiveConf, username);
+      if (! sessionConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) &&
+        sessionConf.getBoolVar(ConfVars.HIVE_SERVER2_MAP_FAIR_SCHEDULER_QUEUE)) {
+        ShimLoader.getHadoopShims().refreshDefaultQueue(sessionConf, username);
       }
     } catch (IOException e) {
       LOG.warn("Error setting scheduler queue: " + e, e);
     }
     // Set an explicit session name to control the download directory name
-    hiveConf.set(ConfVars.HIVESESSIONID.varname,
+    sessionConf.set(ConfVars.HIVESESSIONID.varname,
         this.sessionHandle.getHandleIdentifier().toString());
     // Use thrift transportable formatter
-    hiveConf.set(SerDeUtils.LIST_SINK_OUTPUT_FORMATTER, ThriftFormatter.class.getName());
-    hiveConf.setInt(SerDeUtils.LIST_SINK_OUTPUT_PROTOCOL, protocol.getValue());
+    sessionConf.set(SerDeUtils.LIST_SINK_OUTPUT_FORMATTER, ThriftFormatter.class.getName());
+    sessionConf.setInt(SerDeUtils.LIST_SINK_OUTPUT_PROTOCOL, protocol.getValue());
   }
 
   public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
@@ -158,7 +158,7 @@ public class HiveSessionImpl implements HiveSession {
    * That's why it is important to create SessionState here rather than in the constructor.
    */
   public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
-    sessionState = new SessionState(hiveConf, username);
+    sessionState = new SessionState(sessionConf, username);
     sessionState.setUserIpAddress(ipAddress);
     sessionState.setIsHiveServerQuery(true);
     sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
@@ -224,7 +224,7 @@ public class HiveSessionImpl implements HiveSession {
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
+      String hiverc = sessionConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
       if (hiverc != null) {
         File hivercFile = new File(hiverc);
         if (hivercFile.isDirectory()) {
@@ -258,7 +258,7 @@ public class HiveSessionImpl implements HiveSession {
       } else if (key.startsWith("use:")) {
         SessionState.get().setCurrentDatabase(entry.getValue());
       } else {
-        hiveConf.verifyAndSet(key, entry.getValue());
+        sessionConf.verifyAndSet(key, entry.getValue());
       }
     }
   }
@@ -380,8 +380,8 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public HiveConf getHiveConf() {
-    hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
-    return hiveConf;
+    sessionConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
+    return sessionConf;
   }
 
   @Override
@@ -442,16 +442,6 @@ public class HiveSessionImpl implements HiveSession {
           throws HiveSQLException {
     acquire(true);
 
-    // Make a copy of confOverlay
-    if (confOverlay == null) {
-      confOverlay = new HashMap<String, String>();
-    } else {
-      Map<String, String> conf = new HashMap<String, String>();
-      conf.putAll(confOverlay);
-      confOverlay = conf;
-    }
-    confOverlay.put(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
-
     OperationManager operationManager = getOperationManager();
     ExecuteStatementOperation operation = operationManager
         .newExecuteStatementOperation(getSession(), statement, confOverlay, runAsync);
@@ -578,7 +568,7 @@ public class HiveSessionImpl implements HiveSession {
   public OperationHandle getColumns(String catalogName, String schemaName,
       String tableName, String columnName)  throws HiveSQLException {
     acquire(true);
-    String addedJars = Utilities.getResourceFiles(hiveConf, SessionState.ResourceType.JAR);
+    String addedJars = Utilities.getResourceFiles(sessionConf, SessionState.ResourceType.JAR);
     if (StringUtils.isNotBlank(addedJars)) {
        IMetaStoreClient metastoreClient = getSession().getMetaStoreClient();
        metastoreClient.setHiveAddedJars(addedJars);
@@ -788,7 +778,7 @@ public class HiveSessionImpl implements HiveSession {
       if (fetchType == FetchType.QUERY_OUTPUT) {
         return operationManager.getOperationNextRowSet(opHandle, orientation, maxRows);
       }
-      return operationManager.getOperationLogRowSet(opHandle, orientation, maxRows, hiveConf);
+      return operationManager.getOperationLogRowSet(opHandle, orientation, maxRows, sessionConf);
     } finally {
       release(true);
     }
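
executeStatementInternal no longer copies the confOverlay or stamps a query id by hand; both now happen inside the QueryState that the Operation constructor builds from runAsync and the overlay. A small sketch of the resulting behaviour (runAsync=true is what forces the session conf to be cloned):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;

    public final class QueryIdPerQuery {
      public static void main(String[] args) {
        HiveConf sessionConf = new HiveConf();
        // Async queries clone the session conf, so two statements in the
        // same session get distinct query ids and the session conf itself
        // is never mutated mid-flight.
        QueryState first = new QueryState(sessionConf, null, true);
        QueryState second = new QueryState(sessionConf, null, true);
        System.out.println(first.getQueryId());
        System.out.println(second.getQueryId());
      }
    }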


[2/2] hive git commit: HIVE-13424: Refactoring the code to pass a QueryState object rather than HiveConf object (Reviewed by Sergey Shelukhin)

Posted by ai...@apache.org.
HIVE-13424: Refactoring the code to pass a QueryState object rather than HiveConf object (Reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/caa3ec76
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/caa3ec76
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/caa3ec76

Branch: refs/heads/master
Commit: caa3ec761c18d822259116fde9ff8a4f181df179
Parents: 86bdcbc
Author: Aihua Xu <ai...@apache.org>
Authored: Tue Mar 15 13:12:57 2016 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon Apr 25 16:30:09 2016 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   |   5 -
 .../mapreduce/TestHCatMultiOutputFormat.java    |   7 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   9 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  93 ++++-----------
 .../org/apache/hadoop/hive/ql/QueryState.java   | 114 +++++++++++++++++++
 .../hadoop/hive/ql/exec/ColumnStatsTask.java    |   5 +-
 .../hive/ql/exec/ColumnStatsUpdateTask.java     |   6 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   9 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |   4 +-
 .../apache/hadoop/hive/ql/exec/FetchTask.java   |   5 +-
 .../hadoop/hive/ql/exec/FunctionTask.java       |   6 +-
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |   5 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java    |   6 +-
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |  11 +-
 .../hive/ql/exec/mr/HadoopJobExecHelper.java    |  21 ++--
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   7 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |   7 +-
 .../hadoop/hive/ql/history/HiveHistoryImpl.java |   6 +-
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |   4 +-
 .../hadoop/hive/ql/hooks/HookContext.java       |  11 +-
 .../hive/ql/hooks/PostExecutePrinter.java       |  12 +-
 .../hadoop/hive/ql/hooks/PreExecutePrinter.java |  14 ++-
 .../hive/ql/index/TableBasedIndexHandler.java   |   2 -
 .../hadoop/hive/ql/io/merge/MergeFileTask.java  |   7 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |  12 +-
 .../io/rcfile/truncate/ColumnTruncateTask.java  |   7 +-
 .../hive/ql/optimizer/GenMRTableScan1.java      |   2 +-
 .../index/RewriteParseContextGenerator.java     |   7 +-
 .../RewriteQueryUsingAggregateIndexCtx.java     |   2 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  14 ++-
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |  11 +-
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   7 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |  13 ++-
 .../parse/ExplainSQRewriteSemanticAnalyzer.java |   8 +-
 .../hive/ql/parse/ExplainSemanticAnalyzer.java  |   7 +-
 .../hive/ql/parse/ExportSemanticAnalyzer.java   |   5 +-
 .../hive/ql/parse/FunctionSemanticAnalyzer.java |   5 +-
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |   5 +-
 .../hive/ql/parse/LoadSemanticAnalyzer.java     |   5 +-
 .../hive/ql/parse/MacroSemanticAnalyzer.java    |  12 +-
 .../hadoop/hive/ql/parse/ParseContext.java      |  14 ++-
 .../hive/ql/parse/ProcessAnalyzeTable.java      |   2 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  35 +++---
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |  57 +++++-----
 .../hadoop/hive/ql/parse/TaskCompiler.java      |   9 +-
 .../hadoop/hive/ql/parse/TezCompiler.java       |   5 +-
 .../ql/parse/UpdateDeleteSemanticAnalyzer.java  |   5 +-
 .../parse/spark/SparkProcessAnalyzeTable.java   |   2 +-
 .../hadoop/hive/ql/session/SessionState.java    | 106 ++++++-----------
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   7 +-
 .../ql/parse/TestMacroSemanticAnalyzer.java     |   7 +-
 .../hadoop/hive/ql/parse/TestQBCompact.java     |   8 +-
 .../ql/parse/TestQBJoinTreeApplyPredicate.java  |   7 +-
 .../hadoop/hive/ql/parse/TestQBSubQuery.java    |   7 +-
 .../ql/parse/TestSemanticAnalyzerFactory.java   |   9 +-
 .../parse/TestUpdateDeleteSemanticAnalyzer.java |   9 +-
 .../authorization/AuthorizationTestUtil.java    |  14 +--
 .../parse/authorization/PrivilegesTestBase.java |   5 +-
 .../TestHiveAuthorizationTaskFactory.java       |  12 +-
 .../parse/authorization/TestPrivilegesV1.java   |   9 +-
 .../parse/authorization/TestPrivilegesV2.java   |   8 +-
 .../hive/service/cli/operation/Operation.java   |  19 ++--
 .../service/cli/operation/SQLOperation.java     |  49 ++------
 .../cli/operation/SQLOperationDisplay.java      |   2 +-
 .../service/cli/session/HiveSessionImpl.java    |  40 +++----
 65 files changed, 508 insertions(+), 437 deletions(-)
----------------------------------------------------------------------
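
Taken together, the shape of the refactoring summarized above is: everywhere a bare HiveConf used to travel with a query (Driver, the semantic analyzers, Task.initialize, HookContext), a QueryState now travels instead, and the per-query HiveConf is read back out of it. A minimal sketch of the new calling convention, assuming a configured Hive classpath (the user name is illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.QueryState;

    public final class RefactoringShape {
      public static void main(String[] args) {
        // Before: new Driver(conf); after: the QueryState wraps the conf
        // and also carries the query id and the HiveOperation type.
        QueryState queryState = new QueryState(new HiveConf());
        Driver driver = new Driver(queryState, System.getProperty("user.name"));
        System.out.println("queryId = " + queryState.getQueryId());
      }
    }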


http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 2bcb56d..f467c81 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -121,8 +121,6 @@ public class CliDriver {
 
     ss.updateThreadName();
 
-    conf.set(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
-
     // Flush the print stream, so it doesn't include output from the last command
     ss.err.flush();
     String cmd_trimmed = cmd.trim();
@@ -401,9 +399,6 @@ public class CliDriver {
         }
 
         ret = processCmd(command);
-        //wipe cli query state
-        SessionState ss = SessionState.get();
-        ss.setCommandType(null);
         command = "";
         lastRet = ret;
         boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 1964410..9fa263d 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -26,6 +26,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -369,7 +371,8 @@ public class TestHCatMultiOutputFormat {
    * @throws Exception if any error occurs
    */
   private List<String> getTableData(String table, String database) throws Exception {
-    HiveConf conf = new HiveConf();
+    QueryState queryState = new QueryState(null);
+    HiveConf conf = queryState.getConf();
     conf.addResource("hive-site.xml");
     ArrayList<String> results = new ArrayList<String>();
     ArrayList<String> temp = new ArrayList<String>();
@@ -392,7 +395,7 @@ public class TestHCatMultiOutputFormat {
     }
     FetchTask task = new FetchTask();
     task.setWork(work);
-    task.initialize(conf, null, null, new CompilationOpContext());
+    task.initialize(queryState, null, null, new CompilationOpContext());
     task.fetch(temp);
     for (String str : temp) {
       results.add(str.replace("\t", ","));

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 8473436..a6e8efa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -163,6 +163,7 @@ public class QTestUtil {
   private static MiniClusterType clusterType = MiniClusterType.none;
   private ParseDriver pd;
   protected Hive db;
+  protected QueryState queryState;
   protected HiveConf conf;
   private Driver drv;
   private BaseSemanticAnalyzer sem;
@@ -389,10 +390,12 @@ public class QTestUtil {
       HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
       System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
     }
+
+    queryState = new QueryState(new HiveConf(Driver.class));
     if (useHBaseMetastore) {
       startMiniHBaseCluster();
     } else {
-      conf = new HiveConf(Driver.class);
+      conf = queryState.getConf();
     }
     this.hadoopVer = getHadoopMainVersion(hadoopVer);
     qMap = new TreeMap<String, String>();
@@ -922,7 +925,7 @@ public class QTestUtil {
     drv = new Driver(conf);
     drv.init();
     pd = new ParseDriver();
-    sem = new SemanticAnalyzer(conf);
+    sem = new SemanticAnalyzer(queryState);
   }
 
   public void init(String tname) throws Exception {
@@ -1648,7 +1651,7 @@ public class QTestUtil {
   public void resetParser() throws SemanticException {
     drv.init();
     pd = new ParseDriver();
-    sem = new SemanticAnalyzer(conf);
+    sem = new SemanticAnalyzer(queryState);
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 2a06962..7f72efb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -165,6 +165,9 @@ public class Driver implements CommandProcessor {
   // For WebUI.  Kept alive after queryPlan is freed.
   private final QueryDisplay queryDisplay = new QueryDisplay();
 
+  // Query specific info
+  private QueryState queryState;
+
   private boolean checkConcurrency() {
     boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
     if (!supportConcurrency) {
@@ -290,22 +293,28 @@ public class Driver implements CommandProcessor {
     this.maxRows = maxRows;
   }
 
+  public Driver() {
+    this(new QueryState((SessionState.get() != null) ?
+        SessionState.get().getConf() : new HiveConf()), null);
+  }
+
   /**
    * for backwards compatibility with current tests
    */
   public Driver(HiveConf conf) {
-    this.conf = conf;
-    isParallelEnabled = (conf != null)
-        && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION);
+    this(new QueryState(conf), null);
   }
 
   public Driver(HiveConf conf, String userName) {
-    this(conf);
-    this.userName = userName;
+    this(new QueryState(conf), userName);
   }
 
-  public Driver() {
-    this((SessionState.get() != null) ? SessionState.get().getConf() : null);
+  public Driver(QueryState queryState, String userName) {
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
+    isParallelEnabled = (conf != null)
+        && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION);
+    this.userName = userName;
   }
 
   /**
@@ -320,52 +329,6 @@ public class Driver implements CommandProcessor {
   }
 
   /**
-   * Hold state variables specific to each query being executed, that may not
-   * be consistent in the overall SessionState
-   */
-  private static class QueryState {
-    private HiveOperation op;
-    private String cmd;
-    private boolean init = false;
-
-    /**
-     * Initialize the queryState with the query state variables
-     */
-    public void init(HiveOperation op, String cmd) {
-      this.op = op;
-      this.cmd = cmd;
-      this.init = true;
-    }
-
-    public boolean isInitialized() {
-      return this.init;
-    }
-
-    public HiveOperation getOp() {
-      return this.op;
-    }
-
-    public String getCmd() {
-      return this.cmd;
-    }
-  }
-
-  public void saveSession(QueryState qs) {
-    SessionState oldss = SessionState.get();
-    if (oldss != null && oldss.getHiveOperation() != null) {
-      qs.init(oldss.getHiveOperation(), oldss.getCmd());
-    }
-  }
-
-  public void restoreSession(QueryState qs) {
-    SessionState ss = SessionState.get();
-    if (ss != null && qs != null && qs.isInitialized()) {
-      ss.setCmd(qs.getCmd());
-      ss.setCommandType(qs.getOp());
-    }
-  }
-
-  /**
    * Compile a new query, but potentially reset taskID counter.  Not resetting task counter
    * is useful for generating re-entrant QL queries.
    * @param command  The HiveQL query to compile
@@ -392,9 +355,6 @@ public class Driver implements CommandProcessor {
       LOG.warn("WARNING! Query command could not be redacted." + e);
     }
 
-    //holder for parent command type/string when executing reentrant queries
-    QueryState queryState = new QueryState();
-
     if (ctx != null) {
       close();
     }
@@ -402,7 +362,6 @@ public class Driver implements CommandProcessor {
     if (resetTaskIds) {
       TaskFactory.resetId();
     }
-    saveSession(queryState);
 
     String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
 
@@ -447,7 +406,7 @@ public class Driver implements CommandProcessor {
 
 
       perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE);
-      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
       List<HiveSemanticAnalyzerHook> saHooks =
           getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
               HiveSemanticAnalyzerHook.class);
@@ -491,7 +450,7 @@ public class Driver implements CommandProcessor {
       schema = getSchema(sem, conf);
 
       plan = new QueryPlan(queryStr, sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId,
-        SessionState.get().getHiveOperation(), schema, queryDisplay);
+        queryState.getHiveOperation(), schema, queryDisplay);
 
       conf.setQueryString(queryStr);
 
@@ -500,7 +459,7 @@ public class Driver implements CommandProcessor {
 
       // initialize FetchTask right here
       if (plan.getFetchTask() != null) {
-        plan.getFetchTask().initialize(conf, plan, null, ctx.getOpContext());
+        plan.getFetchTask().initialize(queryState, plan, null, ctx.getOpContext());
       }
 
       //do the authorization check
@@ -509,7 +468,7 @@ public class Driver implements CommandProcessor {
 
         try {
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
-          doAuthorization(sem, command);
+          doAuthorization(queryState.getHiveOperation(), sem, command);
         } catch (AuthorizationException authExp) {
           console.printError("Authorization failed:" + authExp.getMessage()
               + ". Use SHOW GRANT to get more details.");
@@ -562,7 +521,6 @@ public class Driver implements CommandProcessor {
       double duration = perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE)/1000.00;
       ImmutableMap<String, Long> compileHMSTimings = dumpMetaCallTimingWithoutEx("compilation");
       queryDisplay.setHmsTimings(QueryDisplay.Phase.COMPILATION, compileHMSTimings);
-      restoreSession(queryState);
       LOG.info("Completed compiling command(queryId=" + queryId + "); Time taken: " + duration + " seconds");
     }
   }
@@ -589,7 +547,7 @@ public class Driver implements CommandProcessor {
       ASTNode astTree) throws IOException {
     String ret = null;
     ExplainTask task = new ExplainTask();
-    task.initialize(conf, plan, null, ctx.getOpContext());
+    task.initialize(queryState, plan, null, ctx.getOpContext());
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintStream ps = new PrintStream(baos);
     try {
@@ -612,10 +570,9 @@ public class Driver implements CommandProcessor {
    * @throws HiveException
    * @throws AuthorizationException
    */
-  public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
+  public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command)
       throws HiveException, AuthorizationException {
     SessionState ss = SessionState.get();
-    HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
 
     Set<ReadEntity> additionalInputs = new HashSet<ReadEntity>();
@@ -1530,7 +1487,7 @@ public class Driver implements CommandProcessor {
       resStream = null;
 
       SessionState ss = SessionState.get();
-      hookContext = new HookContext(plan, conf, ctx.getPathToCS(), ss.getUserName(),
+      hookContext = new HookContext(plan, queryState, ctx.getPathToCS(), ss.getUserName(),
           ss.getUserIpAddress(), operationId);
       hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);
 
@@ -1857,7 +1814,7 @@ public class Driver implements CommandProcessor {
       cxt.incCurJobNo(1);
       console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs);
     }
-    tsk.initialize(conf, plan, cxt, ctx.getOpContext());
+    tsk.initialize(queryState, plan, cxt, ctx.getOpContext());
     TaskResult tskRes = new TaskResult();
     TaskRunner tskRun = new TaskRunner(tsk, tskRes);
 
@@ -1958,7 +1915,7 @@ public class Driver implements CommandProcessor {
         throw new IOException("Error closing the current fetch task", e);
       }
       // FetchTask should not depend on the plan.
-      fetchTask.initialize(conf, null, null, ctx.getOpContext());
+      fetchTask.initialize(queryState, null, null, ctx.getOpContext());
     } else {
       ctx.resetStream();
       resStream = null;
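
The old Driver entry points remain source-compatible; as the hunk above shows, they now funnel into the QueryState-based constructor, and doAuthorization() takes the HiveOperation explicitly instead of reading it from SessionState. A sketch of the three construction paths (the user name is illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.QueryState;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public final class DriverConstruction {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        SessionState.start(conf);

        Driver fromConf = new Driver(conf);      // legacy, kept for tests
        Driver fromSession = new Driver();       // picks up SessionState's conf
        Driver fromQueryState =                  // the new canonical path
            new Driver(new QueryState(conf), "hive_user");
      }
    }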

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
new file mode 100644
index 0000000..78715d8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.sql.Timestamp;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+
+/**
+ * The class to store query-level info such as the queryId. Multiple queries can run
+ * in the same session, so SessionState holds the common session-level info while
+ * each QueryState holds the info specific to a single query.
+ */
+public class QueryState {
+  /**
+   * current configuration.
+   */
+  private final HiveConf queryConf;
+  /**
+   * type of the command.
+   */
+  private HiveOperation commandType;
+
+  public QueryState(HiveConf conf) {
+    this(conf, null, false);
+  }
+
+  public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
+    this.queryConf = createConf(conf, confOverlay, runAsync);
+  }
+
+  /**
+   * If there are query-specific settings to overlay, then create a copy of the config.
+   * There are two cases where we need to clone the session config being passed to the Hive driver:
+   * 1. Async query -
+   *    If the client changes a config setting, that shouldn't affect an execution already underway
+   * 2. confOverlay -
+   *    The query-specific settings should only be applied to the query config, not the session
+   * @return new configuration
+   */
+  private HiveConf createConf(HiveConf conf,
+      Map<String, String> confOverlay,
+      boolean runAsync) {
+
+    if (confOverlay != null && !confOverlay.isEmpty()) {
+      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
+
+      // apply overlay query specific settings, if any
+      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+        try {
+          conf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
+        } catch (IllegalArgumentException e) {
+          throw new RuntimeException("Error applying statement specific settings", e);
+        }
+      }
+    } else if (runAsync) {
+      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
+    }
+
+    if (conf == null) {
+      conf = new HiveConf();
+    }
+
+    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
+    return conf;
+  }
+
+  public String getQueryId() {
+    return (queryConf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+  }
+
+  public String getQueryString() {
+    return queryConf.getQueryString();
+  }
+
+  public String getCommandType() {
+    if (commandType == null) {
+      return null;
+    }
+    return commandType.getOperationName();
+  }
+
+  public HiveOperation getHiveOperation() {
+    return commandType;
+  }
+
+  public void setCommandType(HiveOperation commandType) {
+    this.commandType = commandType;
+  }
+
+  public HiveConf getConf() {
+    return queryConf;
+  }
+}
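
A usage sketch of the overlay path through createConf() above: a non-empty confOverlay (like an async query) forces a clone of the incoming conf before verifyAndSet() applies each entry, so the caller's conf is left untouched (the demo class is illustrative):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;

    public final class QueryStateOverlayDemo {
      public static void main(String[] args) {
        Map<String, String> overlay = new HashMap<String, String>();
        overlay.put(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, "mr");

        HiveConf sessionConf = new HiveConf();
        QueryState queryState = new QueryState(sessionConf, overlay, false);

        // The overlay landed on the query-scoped clone...
        System.out.println(queryState.getConf()
            .getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
        // ...and createConf() stamped a fresh query id on it as well.
        System.out.println(queryState.getQueryId());
      }
    }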

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index 9059928..05dfa3b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -83,9 +84,9 @@ public class ColumnStatsTask extends Task<ColumnStatsWork> implements Serializab
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
     work.initializeForFetch(opContext);
     try {
       JobConf job = new JobConf(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index bca8a6c..9a6e5c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -27,7 +27,6 @@ import java.util.Map.Entry;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -68,9 +68,9 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       .getLogger(ColumnStatsUpdateTask.class);
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
   }
 
   private ColumnStatistics constructColumnStatsFromInput()

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index b26f09d..d2c3ca8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer;
@@ -259,9 +260,9 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
 
     // Pick the formatter to use to display the results.  Either the
     // normal human readable output or a json object.
@@ -663,7 +664,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     }
 
     // initialize the task and execute
-    task.initialize(db.getConf(), getQueryPlan(), driverCxt, opContext);
+    task.initialize(queryState, getQueryPlan(), driverCxt, opContext);
     int ret = task.execute(driverCxt);
     return ret;
   }
@@ -4160,7 +4161,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       truncateWork.setMapperCannotSpanPartns(true);
       DriverContext driverCxt = new DriverContext();
       ColumnTruncateTask taskExec = new ColumnTruncateTask();
-      taskExec.initialize(db.getConf(), null, driverCxt, null);
+      taskExec.initialize(queryState, null, driverCxt, null);
       taskExec.setWork(truncateWork);
       taskExec.setQueryPlan(this.getQueryPlan());
       return taskExec.execute(driverCxt);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 4ce0864..403d57c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -312,7 +312,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
       throws Exception {
 
     BaseSemanticAnalyzer analyzer = work.getAnalyzer();
-    HiveOperation operation = SessionState.get().getHiveOperation();
+    HiveOperation operation = queryState.getHiveOperation();
 
     JSONObject object = new JSONObject(new LinkedHashMap<>());
     Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
@@ -349,7 +349,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
 
       SessionState.get().setActiveAuthorizer(authorizer);
       try {
-        Driver.doAuthorization(analyzer, "");
+        Driver.doAuthorization(queryState.getHiveOperation(), analyzer, "");
       } finally {
         SessionState.get().setActiveAuthorizer(delegate);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index b96ea04..ec9e98e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
@@ -59,9 +60,9 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
     work.initializeForFetch(opContext);
 
     try {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index 1b971fc..42cdc84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -25,6 +25,7 @@ import java.util.List;
 
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -37,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
@@ -62,9 +64,9 @@ public class FunctionTask extends Task<FunctionWork> {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
index 3199ee1..d5ae019 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -84,9 +85,9 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     jc = new JobConf(conf);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 6c677f5..897af5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -51,6 +51,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
   public transient HashMap<String, Long> taskCounters;
   public transient TaskHandle taskHandle;
   protected transient HiveConf conf;
+  protected transient QueryState queryState;
   protected transient LogHelper console;
   protected transient QueryPlan queryPlan;
   protected transient DriverContext driverContext;
@@ -124,11 +125,12 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
     return taskHandle;
   }
 
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
     this.queryPlan = queryPlan;
     setInitialized();
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.driverContext = driverContext;
     console = new LogHelper(LOG);
   }
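
Every concrete task in this diff repeats the same override pattern against the new signature. A skeletal no-op task showing just that pattern (the class is illustrative; the other abstract Task methods are stubbed only to keep the sketch compilable):

    import java.io.Serializable;

    import org.apache.hadoop.hive.ql.CompilationOpContext;
    import org.apache.hadoop.hive.ql.DriverContext;
    import org.apache.hadoop.hive.ql.QueryPlan;
    import org.apache.hadoop.hive.ql.QueryState;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.plan.api.StageType;

    public class NoOpTask extends Task<Serializable> {
      private static final long serialVersionUID = 1L;

      @Override
      public void initialize(QueryState queryState, QueryPlan queryPlan,
          DriverContext driverContext, CompilationOpContext opContext) {
        // The base class keeps queryState and derives this.conf from it,
        // so subclasses no longer take a HiveConf at all.
        super.initialize(queryState, queryPlan, driverContext, opContext);
      }

      @Override
      public int execute(DriverContext driverContext) {
        return 0; // no-op
      }

      @Override
      public StageType getType() {
        return StageType.DDL;
      }

      @Override
      public String getName() {
        return "NO-OP";
      }
    }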

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index d164859..639b0da 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -124,7 +125,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
   public ExecDriver() {
     super();
     console = new LogHelper(LOG);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   @Override
@@ -142,9 +143,9 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
    * Initialization when invoked from QL.
    */
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
 
     job = new JobConf(conf, ExecDriver.class);
 
@@ -168,7 +169,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives);
     }
     conf.stripHiddenConfigurations(job);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   /**
@@ -178,7 +179,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     setWork(plan);
     this.job = job;
     console = new LogHelper(LOG, isSilent);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index 1b296b9..760ba6c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -33,6 +33,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskHandle;
@@ -75,6 +76,7 @@ public class HadoopJobExecHelper {
   public transient JobID jobId;
   private final LogHelper console;
   private final HadoopJobExecHook callBackObj;
+  private final QueryState queryState;
 
   /**
    * Update counters relevant to this task.
@@ -135,8 +137,9 @@ public class HadoopJobExecHelper {
     this.jobId = jobId;
   }
 
-  public HadoopJobExecHelper(JobConf job, LogHelper console,
+  public HadoopJobExecHelper(QueryState queryState, JobConf job, LogHelper console,
       Task<? extends Serializable> task, HadoopJobExecHook hookCallBack) {
+    this.queryState = queryState;
     this.job = job;
     this.console = console;
     this.task = task;
@@ -250,14 +253,14 @@ public class HadoopJobExecHelper {
 
           String logMapper;
           String logReducer;
-
+          String queryId = queryState.getQueryId();
           TaskReport[] mappers = jc.getMapTaskReports(rj.getID());
           if (mappers == null) {
             logMapper = "no information for number of mappers; ";
           } else {
             numMap = mappers.length;
             if (ss != null) {
-              ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+              ss.getHiveHistory().setTaskProperty(queryId, getId(),
                 Keys.TASK_NUM_MAPPERS, Integer.toString(numMap));
             }
             logMapper = "number of mappers: " + numMap + "; ";
@@ -269,7 +272,7 @@ public class HadoopJobExecHelper {
           } else {
             numReduce = reducers.length;
             if (ss != null) {
-              ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+              ss.getHiveHistory().setTaskProperty(queryId, getId(),
                 Keys.TASK_NUM_REDUCERS, Integer.toString(numReduce));
             }
             logReducer = "number of reducers: " + numReduce;
@@ -355,11 +358,11 @@ public class HadoopJobExecHelper {
       String output = report.toString();
       SessionState ss = SessionState.get();
       if (ss != null) {
-        ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs);
-        ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+        ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs);
+        ss.getHiveHistory().setTaskProperty(queryState.getQueryId(), getId(),
             Keys.TASK_HADOOP_PROGRESS, output);
         if (ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS)) {
-          ss.getHiveHistory().progressTask(SessionState.get().getQueryId(), this.task);
+          ss.getHiveHistory().progressTask(queryState.getQueryId(), this.task);
           this.callBackObj.logPlanProgress(ss);
         }
       }
@@ -386,7 +389,7 @@ public class HadoopJobExecHelper {
       } else {
         SessionState ss = SessionState.get();
         if (ss != null) {
-          ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs);
+          ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs);
         }
         success = rj.isSuccessful();
       }
@@ -430,7 +433,7 @@ public class HadoopJobExecHelper {
       console.printInfo("Job running in-process (local Hadoop)");
     } else {
       if (SessionState.get() != null) {
-        SessionState.get().getHiveHistory().setTaskProperty(SessionState.get().getQueryId(),
+        SessionState.get().getHiveHistory().setTaskProperty(queryState.getQueryId(),
             getId(), Keys.TASK_HADOOP_ID, rj.getID().toString());
       }
       console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = "

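HadoopJobExecHelper now receives the QueryState as its first constructor argument, so the HiveHistory bookkeeping above uses queryState.getQueryId() instead of reaching into the thread-local SessionState on every update. A sketch of the new construction, assuming job, console, and task are already in scope (a null hook callback is legal, as the MapredLocalTask hunk below shows):

    // Sketch: the helper caches the QueryState and reads the query id from
    // it when recording mapper/reducer counts and task counters.
    HadoopJobExecHelper helper =
        new HadoopJobExecHelper(queryState, job, console, task, null);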
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index c81b14c..3c1f0de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.BucketMatcher;
 import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -121,13 +122,13 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, ExecDriver.class);
     execContext = new ExecMapperContext(job);
     //we don't use the HadoopJobExecHooks for local tasks
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, null);
   }
 
   public static String now() {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index 7f87adf..0b494aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapOperator;
@@ -71,9 +72,9 @@ public class SparkTask extends Task<SparkWork> {
   private static final long serialVersionUID = 1L;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
   }
 
   @Override
@@ -137,7 +138,7 @@ public class SparkTask extends Task<SparkWork> {
     console.printInfo("Starting Spark Job = " + jobRef.getJobId());
     if (SessionState.get() != null) {
       SessionState.get().getHiveHistory()
-	  .setQueryProperty(SessionState.get().getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId());
+	  .setQueryProperty(queryState.getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index a1e35cb..0234fd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -266,13 +266,9 @@ public class HiveHistoryImpl implements HiveHistory{
   @Override
   public void startTask(String queryId, Task<? extends Serializable> task,
       String taskName) {
-    SessionState ss = SessionState.get();
-    if (ss == null) {
-      return;
-    }
     TaskInfo ti = new TaskInfo();
 
-    ti.hm.put(Keys.QUERY_ID.name(), ss.getQueryId());
+    ti.hm.put(Keys.QUERY_ID.name(), queryId);
     ti.hm.put(Keys.TASK_ID.name(), task.getId());
     ti.hm.put(Keys.TASK_NAME.name(), taskName);
 

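With the SessionState null-check removed, startTask records whatever queryId the caller passes in. A call-site sketch, assuming ss is the current SessionState; the taskName argument here is an invented example:

    // Sketch: the query id is threaded explicitly rather than read back out
    // of SessionState inside the history implementation.
    ss.getHiveHistory().startTask(queryState.getQueryId(), task,
        task.getClass().getName());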
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index b7e70be..742edc8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -25,6 +25,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -94,6 +95,7 @@ public class ATSHook implements ExecuteWithHookContext {
   public void run(final HookContext hookContext) throws Exception {
     final long currentTime = System.currentTimeMillis();
     final HiveConf conf = new HiveConf(hookContext.getConf());
+    final QueryState queryState = hookContext.getQueryState();
 
     executor.submit(new Runnable() {
         @Override
@@ -134,7 +136,7 @@ public class ATSHook implements ExecuteWithHookContext {
             );
               @SuppressWarnings("unchecked")
               ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf);
-              explain.initialize(conf, plan, null, null);
+              explain.initialize(queryState, plan, null, null);
               String query = plan.getQueryStr();
               JSONObject explainPlan = explain.getJSONPlan(null, work);
               String logID = conf.getLogIdVar(SessionState.get().getSessionId());

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
index 8e1672f..8db0124 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -43,6 +44,7 @@ public class HookContext {
   }
 
   private QueryPlan queryPlan;
+  private final QueryState queryState;
   private HiveConf conf;
   private List<TaskRunner> completeTaskList;
   private Set<ReadEntity> inputs;
@@ -60,11 +62,12 @@ public class HookContext {
   // TExecuteStatementResp.TOperationHandle.THandleIdentifier.guid
   private final String operationId;
 
-  public HookContext(QueryPlan queryPlan, HiveConf conf,
+  public HookContext(QueryPlan queryPlan, QueryState queryState,
       Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress,
       String operationId) throws Exception {
     this.queryPlan = queryPlan;
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.inputPathToContentSummary = inputPathToContentSummary;
     completeTaskList = new ArrayList<TaskRunner>();
     inputs = queryPlan.getInputs();
@@ -192,4 +195,8 @@ public class HookContext {
   public String getOperationId() {
     return operationId;
   }
+
+  public QueryState getQueryState() {
+    return queryState;
+  }
 }

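Since HookContext now carries the QueryState (and derives its conf from it), any ExecuteWithHookContext implementation can read per-query state without consulting SessionState. A hypothetical hook as a sketch; the class name is invented, and only run(HookContext), getQueryState(), and getQueryId() come from this patch:

    // Hypothetical hook: prints the query id taken from the HookContext's
    // QueryState.
    public class QueryIdLoggingHook implements ExecuteWithHookContext {
      @Override
      public void run(HookContext hookContext) throws Exception {
        QueryState queryState = hookContext.getQueryState();
        System.out.println("query id: " + queryState.getQueryId());
      }
    }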
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
index 4518315..b4fc125 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
@@ -27,6 +27,7 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
@@ -97,15 +98,14 @@ public class PostExecutePrinter implements ExecuteWithHookContext {
   @Override
   public void run(HookContext hookContext) throws Exception {
     assert(hookContext.getHookType() == HookType.POST_EXEC_HOOK);
-    SessionState ss = SessionState.get();
     Set<ReadEntity> inputs = hookContext.getInputs();
     Set<WriteEntity> outputs = hookContext.getOutputs();
     LineageInfo linfo = hookContext.getLinfo();
     UserGroupInformation ugi = hookContext.getUgi();
-    this.run(ss,inputs,outputs,linfo,ugi);
+    this.run(hookContext.getQueryState(),inputs,outputs,linfo,ugi);
   }
 
-  public void run(SessionState sess, Set<ReadEntity> inputs,
+  public void run(QueryState queryState, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, LineageInfo linfo,
       UserGroupInformation ugi) throws Exception {
 
@@ -115,9 +115,9 @@ public class PostExecutePrinter implements ExecuteWithHookContext {
       return;
     }
 
-    if (sess != null) {
-      console.printError("POSTHOOK: query: " + sess.getCmd().trim());
-      console.printError("POSTHOOK: type: " + sess.getCommandType());
+    if (queryState != null) {
+      console.printError("POSTHOOK: query: " + queryState.getQueryString().trim());
+      console.printError("POSTHOOK: type: " + queryState.getCommandType());
     }
 
     PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
index b5a26d8..232c62d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.hive.common.io.FetchConverter;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -40,8 +41,9 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
   public void run(HookContext hookContext) throws Exception {
     assert(hookContext.getHookType() == HookType.PRE_EXEC_HOOK);
     SessionState ss = SessionState.get();
+    QueryState queryState = hookContext.getQueryState();
     if (ss != null && ss.out instanceof FetchConverter) {
-      boolean foundQuery = ss.getHiveOperation() == HiveOperation.QUERY &&
+      boolean foundQuery = queryState.getHiveOperation() == HiveOperation.QUERY &&
               !hookContext.getQueryPlan().isForExplain();
       ((FetchConverter)ss.out).foundQuery(foundQuery);
     }
@@ -49,10 +51,10 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
     Set<ReadEntity> inputs = hookContext.getInputs();
     Set<WriteEntity> outputs = hookContext.getOutputs();
     UserGroupInformation ugi = hookContext.getUgi();
-    this.run(ss,inputs,outputs,ugi);
+    this.run(queryState,inputs,outputs,ugi);
   }
 
-  public void run(SessionState sess, Set<ReadEntity> inputs,
+  public void run(QueryState queryState, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
     throws Exception {
 
@@ -62,9 +64,9 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
       return;
     }
 
-    if (sess != null) {
-      console.printError("PREHOOK: query: " + sess.getCmd().trim());
-      console.printError("PREHOOK: type: " + sess.getCommandType());
+    if (queryState != null) {
+      console.printError("PREHOOK: query: " + queryState.getQueryString().trim());
+      console.printError("PREHOOK: type: " + queryState.getCommandType());
     }
 
     printEntities(console, inputs, "PREHOOK: Input: ");

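Both printers now take the command text and type from the QueryState, replacing the old SessionState.getCmd()/getCommandType() lookups. The equivalent reads, as a sketch assuming a non-null queryState:

    // Sketch: per-query command string and type, as printed in the
    // PREHOOK/POSTHOOK banners above.
    String query = queryState.getQueryString().trim();
    String type = queryState.getCommandType();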
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
index 807959e..29886ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
@@ -26,8 +26,6 @@ import java.util.Set;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.Task;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
index 82629c1..6b0343b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -60,11 +61,11 @@ public class MergeFileTask extends Task<MergeFileWork> implements Serializable,
   private boolean success = true;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, MergeFileTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 71371a3..d31510d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -84,11 +85,11 @@ public class PartialScanTask extends Task<PartialScanWork> implements
   protected HadoopJobExecHelper jobExecHelper;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, PartialScanTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override
@@ -331,7 +332,6 @@ public class PartialScanTask extends Task<PartialScanWork> implements
     if (jobConfFileName != null) {
       conf.addResource(new Path(jobConfFileName));
     }
-    HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class);
 
     org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
     boolean isSilent = HiveConf.getBoolVar(conf,
@@ -348,11 +348,11 @@ public class PartialScanTask extends Task<PartialScanWork> implements
       }
     }
 
-
+    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
     PartialScanWork mergeWork = new PartialScanWork(inputPaths);
     DriverContext driverCxt = new DriverContext();
     PartialScanTask taskExec = new PartialScanTask();
-    taskExec.initialize(hiveConf, null, driverCxt, new CompilationOpContext());
+    taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
     taskExec.setWork(mergeWork);
     int ret = taskExec.execute(driverCxt);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index bc21da0..8acd6e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -59,11 +60,11 @@ public class ColumnTruncateTask extends Task<ColumnTruncateWork> implements Seri
   protected HadoopJobExecHelper jobExecHelper;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, ColumnTruncateTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 9c979be..669d56f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -204,7 +204,7 @@ public class GenMRTableScan1 implements NodeProcessor {
     // partial scan task
     DriverContext driverCxt = new DriverContext();
     Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf());
-    psTask.initialize(parseCtx.getConf(), null, driverCxt, op.getCompilationOpContext());
+    psTask.initialize(parseCtx.getQueryState(), null, driverCxt, op.getCompilationOpContext());
     psTask.setWork(scanWork);
 
     // task dependency

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
index 64f9734..340d29a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
@@ -24,6 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
@@ -56,16 +57,16 @@ public final class RewriteParseContextGenerator {
    * @param command
    * @throws SemanticException
    */
-  public static Operator<? extends OperatorDesc> generateOperatorTree(HiveConf conf,
+  public static Operator<? extends OperatorDesc> generateOperatorTree(QueryState queryState,
       String command) throws SemanticException {
     Operator<? extends OperatorDesc> operatorTree;
     try {
-      Context ctx = new Context(conf);
+      Context ctx = new Context(queryState.getConf());
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
       tree = ParseUtils.findRootNonNullToken(tree);
 
-      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
       assert(sem instanceof SemanticAnalyzer);
       operatorTree = doSemanticAnalysis((SemanticAnalyzer) sem, tree, ctx);
       LOG.info("Sub-query Semantic Analysis Completed");

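The index-rewrite path now threads the QueryState into sub-query compilation; the generator builds its Context from queryState.getConf() and passes the QueryState to the analyzer factory. A call sketch, where command stands for the rewritten SELECT text:

    // Sketch: compile a rewritten sub-query into an operator tree under the
    // new signature.
    Operator<? extends OperatorDesc> root =
        RewriteParseContextGenerator.generateOperatorTree(queryState, command);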
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
index 74bedcb..3d11907 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
@@ -268,7 +268,7 @@ public final class RewriteQueryUsingAggregateIndexCtx  implements NodeProcessorC
           + rewriteQueryCtx.getIndexKey() + " ";
       // retrieve the operator tree for the query, and the required GroupByOperator from it
       Operator<?> newOperatorTree = RewriteParseContextGenerator.generateOperatorTree(
-              rewriteQueryCtx.getParseContext().getConf(),
+              rewriteQueryCtx.getParseContext().getQueryState(),
               selReplacementCommand);
 
       // we get our new GroupByOperator here

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 19342a8..cbb1b0a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -87,6 +88,7 @@ public abstract class BaseSemanticAnalyzer {
   // Assumes one instance of this + single-threaded compilation for each query.
   protected final Hive db;
   protected final HiveConf conf;
+  protected final QueryState queryState;
   protected List<Task<? extends Serializable>> rootTasks;
   protected FetchTask fetchTask;
   protected final Logger LOG;
@@ -199,13 +201,14 @@ public abstract class BaseSemanticAnalyzer {
     }
   }
 
-  public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    this(conf, createHiveDB(conf));
+  public BaseSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    this(queryState, createHiveDB(queryState.getConf()));
   }
 
-  public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException {
+  public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
     try {
-      this.conf = conf;
+      this.queryState = queryState;
+      this.conf = queryState.getConf();
       this.db = db;
       rootTasks = new ArrayList<Task<? extends Serializable>>();
       LOG = LoggerFactory.getLogger(this.getClass().getName());
@@ -1502,4 +1505,7 @@ public abstract class BaseSemanticAnalyzer {
   public HashSet<WriteEntity> getAllOutputs() {
     return outputs;
   }
+  public QueryState getQueryState() {
+    return queryState;
+  }
 }

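Every analyzer constructor in the remainder of the patch follows this BaseSemanticAnalyzer(QueryState) signature, and SemanticAnalyzerFactory is the usual entry point. A compilation sketch, assuming tree and ctx are already built:

    // Sketch: the factory now needs the QueryState; the analyzer exposes it
    // again via getQueryState() for ParseContext and downstream tasks.
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
    sem.analyze(tree, ctx);
    sem.validate();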
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index c069dc4..8e00e0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -104,6 +104,7 @@ import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -217,8 +218,8 @@ public class CalcitePlanner extends SemanticAnalyzer {
   private boolean disableSemJoinReordering = true;
   private EnumSet<ExtendedCBOProfile> profilesCBO;
 
-  public CalcitePlanner(HiveConf conf) throws SemanticException {
-    super(conf);
+  public CalcitePlanner(QueryState queryState) throws SemanticException {
+    super(queryState);
     if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) {
       runCBO = false;
       disableSemJoinReordering = false;
@@ -514,18 +515,18 @@ public class CalcitePlanner extends SemanticAnalyzer {
     createTable.addChild(temporary);
     createTable.addChild(cte.cteNode);
 
-    CalcitePlanner analyzer = new CalcitePlanner(conf);
+    CalcitePlanner analyzer = new CalcitePlanner(queryState);
     analyzer.initCtx(ctx);
     analyzer.init(false);
 
     // should share cte contexts
     analyzer.aliasToCTEs.putAll(aliasToCTEs);
 
-    HiveOperation operation = SessionState.get().getHiveOperation();
+    HiveOperation operation = queryState.getHiveOperation();
     try {
       analyzer.analyzeInternal(createTable);
     } finally {
-      SessionState.get().setCommandType(operation);
+      queryState.setCommandType(operation);
     }
 
     Table table = analyzer.tableDesc.toTable(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index bb1bbad..3b6cbce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -25,15 +25,14 @@ import java.util.Map;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -62,8 +61,8 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
   private List<String> colType;
   private Table tbl;
 
-  public ColumnStatsSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ColumnStatsSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   private boolean shouldRewrite(ASTNode tree) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 46d2342..04e2a41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -229,12 +230,12 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     return typeName;
   }
 
-  public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    this(conf, createHiveDB(conf));
+  public DDLSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    this(queryState, createHiveDB(queryState.getConf()));
   }
 
-  public DDLSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException {
-    super(conf, db);
+  public DDLSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
+    super(queryState, db);
     reservedPartitionValues = new HashSet<String>();
     // Partition can't have this name
     reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME));
@@ -1350,9 +1351,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
         .getChild(0));
     EnvironmentContext environmentContext = null;
-    if (SessionState.get().getCommandType()
+    if (queryState.getCommandType()
         .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName())
-        || SessionState.get().getCommandType()
+        || queryState.getCommandType()
             .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
       // we need to check if the properties are valid, especially for stats.
       boolean changeStatsSucceeded = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
index 6f0f3a6..8d7fd92 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
@@ -19,8 +19,8 @@
 
 import java.util.List;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainSQRewriteTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
 public class ExplainSQRewriteSemanticAnalyzer extends BaseSemanticAnalyzer {
   List<FieldSchema> fieldList;
 
-  public ExplainSQRewriteSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExplainSQRewriteSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @SuppressWarnings("unchecked")
@@ -42,7 +42,7 @@ public class ExplainSQRewriteSemanticAnalyzer extends BaseSemanticAnalyzer {
     // Create a semantic analyzer for the query
     ASTNode input = (ASTNode) ast.getChild(0);
     SemanticAnalyzer sem = (SemanticAnalyzer)
-        SemanticAnalyzerFactory.get(conf, input);
+        SemanticAnalyzerFactory.get(queryState, input);
     sem.analyze(input, ctx);
     sem.validate();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index eefc145..75753b0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -37,8 +38,8 @@ import org.apache.hadoop.hive.ql.plan.ExplainWork;
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
   List<FieldSchema> fieldList;
 
-  public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExplainSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @SuppressWarnings("unchecked")
@@ -70,7 +71,7 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // Create a semantic analyzer for the query
     ASTNode input = (ASTNode) ast.getChild(0);
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, input);
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, input);
     sem.analyze(input, ctx);
     sem.validate();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index fe8147a..475f2c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -47,8 +48,8 @@ public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   private ReplicationSpec replicationSpec;
 
-  public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExportSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index be908d3..1ec45ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -49,8 +50,8 @@ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Logger LOG = LoggerFactory
       .getLogger(FunctionSemanticAnalyzer.class);
 
-  public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public FunctionSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 549d24f..a9bc271 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -74,8 +75,8 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   public static final String METADATA_NAME="_metadata";
 
-  public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ImportSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   private boolean tableExists = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index b90616f..a49b813 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -58,8 +59,8 @@ import com.google.common.collect.Lists;
  */
 public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
-  public LoadSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public LoadSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   public static FileStatus[] matchFilesOrDir(FileSystem fs, Path path)

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index e394914..fe065f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -44,13 +45,10 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
 import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
@@ -62,8 +60,8 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Logger LOG = LoggerFactory
       .getLogger(MacroSemanticAnalyzer.class);
 
-  public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public MacroSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override
@@ -132,8 +130,8 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
       throw new SemanticException("At least one parameter name was used more than once "
           + macroColNames);
     }
-    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner(
-        conf) : new SemanticAnalyzer(conf);
+    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
+        new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
     ;
     ExprNodeDesc body;
     if(isNoArgumentMacro) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
index 1bccf20..5d0f905 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
@@ -78,6 +79,7 @@ public class ParseContext {
   private List<LoadTableDesc> loadTableWork;
   private List<LoadFileDesc> loadFileWork;
   private Context ctx;
+  private QueryState queryState;
   private HiveConf conf;
   private HashMap<String, String> idToTableNameMap;
   private int destTableId;
@@ -153,7 +155,7 @@ public class ParseContext {
    * @param rootTasks
    */
   public ParseContext(
-      HiveConf conf,
+      QueryState queryState,
       HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner,
       HashMap<TableScanOperator, PrunedPartitionList> opToPartList,
       HashMap<String, TableScanOperator> topOps,
@@ -173,7 +175,8 @@ public class ParseContext {
       List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting,
       AnalyzeRewriteContext analyzeRewrite, CreateTableDesc createTableDesc,
       QueryProperties queryProperties, Map<SelectOperator, Table> viewProjectToTableSchema) {
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.opToPartPruner = opToPartPruner;
     this.opToPartList = opToPartList;
     this.joinOps = joinOps;
@@ -241,6 +244,13 @@ public class ParseContext {
   }
 
   /**
+   * @return the query state
+   */
+  public QueryState getQueryState() {
+    return queryState;
+  }
+
+  /**
    * @return the opToPartPruner
    */
   public HashMap<TableScanOperator, ExprNodeDesc> getOpToPartPruner() {