Posted to commits@hive.apache.org by se...@apache.org on 2015/09/22 01:03:15 UTC

[3/8] hive git commit: HIVE-11891 - Add basic performance logging to metastore calls (Brock via Szehon)

HIVE-11891 - Add basic performance logging to metastore calls (Brock via Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/21861592
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/21861592
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/21861592

Branch: refs/heads/llap
Commit: 2186159209db49f2aeab06be7c38203fbbb5550c
Parents: 93a6627
Author: Brock Noland <br...@apache.org>
Authored: Sun Sep 20 15:49:01 2015 -0700
Committer: Brock Noland <br...@apache.org>
Committed: Sun Sep 20 15:49:01 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/log/PerfLogger.java   | 196 +++++++++++++++++++
 .../hive/metastore/RetryingHMSHandler.java      |  33 +++-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  11 +-
 .../hadoop/hive/ql/exec/MapJoinOperator.java    |   3 +-
 .../ql/exec/SparkHashTableSinkOperator.java     |   3 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |  11 +-
 .../hadoop/hive/ql/exec/spark/SparkPlan.java    |   3 +-
 .../hive/ql/exec/spark/SparkPlanGenerator.java  |   3 +-
 .../hive/ql/exec/spark/SparkRecordHandler.java  |   3 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |   2 +-
 .../ql/exec/spark/status/SparkJobMonitor.java   |   2 +-
 .../hive/ql/exec/tez/RecordProcessor.java       |   3 +-
 .../hive/ql/exec/tez/ReduceRecordProcessor.java |   1 -
 .../hive/ql/exec/tez/ReduceRecordSource.java    |   3 +-
 .../hadoop/hive/ql/exec/tez/TezJobMonitor.java  |   4 +-
 .../hadoop/hive/ql/exec/tez/TezProcessor.java   |   3 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |   2 +-
 .../hive/ql/io/CombineHiveInputFormat.java      |  10 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |   5 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   | 195 ------------------
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |   7 +-
 .../hive/ql/parse/spark/SparkCompiler.java      |   3 +-
 .../hadoop/hive/ql/session/SessionState.java    |  37 ++--
 23 files changed, 293 insertions(+), 250 deletions(-)
----------------------------------------------------------------------
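
In summary, the patch moves PerfLogger from ql into common so the metastore module can depend on it, and switches call sites from the old PerfLogger.getPerfLogger() thread-local accessor to the new SessionState.getPerfLogger() entry point. Purely as an illustration of the call-site change repeated across the files above (not part of the patch itself), the migration looks roughly like this:

    // before: thread-local accessor on PerfLogger itself
    PerfLogger perfLogger = PerfLogger.getPerfLogger();

    // after: SessionState owns the lookup (falling back to a thread-local
    // instance when no session is active)
    PerfLogger perfLogger = SessionState.getPerfLogger();
    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
    // ... timed work ...
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.GET_SPLITS);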


http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
new file mode 100644
index 0000000..6263a6d
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.log;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * PerfLogger.
+ *
+ * Can be used to measure and log the time spent by a piece of code.
+ */
+public class PerfLogger {
+  public static final String ACQUIRE_READ_WRITE_LOCKS = "acquireReadWriteLocks";
+  public static final String COMPILE = "compile";
+  public static final String PARSE = "parse";
+  public static final String ANALYZE = "semanticAnalyze";
+  public static final String DO_AUTHORIZATION = "doAuthorization";
+  public static final String DRIVER_EXECUTE = "Driver.execute";
+  public static final String INPUT_SUMMARY = "getInputSummary";
+  public static final String GET_SPLITS = "getSplits";
+  public static final String RUN_TASKS = "runTasks";
+  public static final String SERIALIZE_PLAN = "serializePlan";
+  public static final String DESERIALIZE_PLAN = "deserializePlan";
+  public static final String CLONE_PLAN = "clonePlan";
+  public static final String TASK = "task.";
+  public static final String RELEASE_LOCKS = "releaseLocks";
+  public static final String PRUNE_LISTING = "prune-listing";
+  public static final String PARTITION_RETRIEVING = "partition-retrieving";
+  public static final String PRE_HOOK = "PreHook.";
+  public static final String POST_HOOK = "PostHook.";
+  public static final String FAILURE_HOOK = "FailureHook.";
+  public static final String DRIVER_RUN = "Driver.run";
+  public static final String TIME_TO_SUBMIT = "TimeToSubmit";
+  public static final String TEZ_SUBMIT_TO_RUNNING = "TezSubmitToRunningDag";
+  public static final String TEZ_BUILD_DAG = "TezBuildDag";
+  public static final String TEZ_SUBMIT_DAG = "TezSubmitDag";
+  public static final String TEZ_RUN_DAG = "TezRunDag";
+  public static final String TEZ_CREATE_VERTEX = "TezCreateVertex.";
+  public static final String TEZ_RUN_VERTEX = "TezRunVertex.";
+  public static final String TEZ_INITIALIZE_PROCESSOR = "TezInitializeProcessor";
+  public static final String TEZ_RUN_PROCESSOR = "TezRunProcessor";
+  public static final String TEZ_INIT_OPERATORS = "TezInitializeOperators";
+  public static final String LOAD_HASHTABLE = "LoadHashtable";
+
+  public static final String SPARK_SUBMIT_TO_RUNNING = "SparkSubmitToRunning";
+  public static final String SPARK_BUILD_PLAN = "SparkBuildPlan";
+  public static final String SPARK_BUILD_RDD_GRAPH = "SparkBuildRDDGraph";
+  public static final String SPARK_SUBMIT_JOB = "SparkSubmitJob";
+  public static final String SPARK_RUN_JOB = "SparkRunJob";
+  public static final String SPARK_CREATE_TRAN = "SparkCreateTran.";
+  public static final String SPARK_RUN_STAGE = "SparkRunStage.";
+  public static final String SPARK_INIT_OPERATORS = "SparkInitializeOperators";
+  public static final String SPARK_GENERATE_TASK_TREE = "SparkGenerateTaskTree";
+  public static final String SPARK_OPTIMIZE_OPERATOR_TREE = "SparkOptimizeOperatorTree";
+  public static final String SPARK_OPTIMIZE_TASK_TREE = "SparkOptimizeTaskTree";
+  public static final String SPARK_FLUSH_HASHTABLE = "SparkFlushHashTable.";
+
+  protected final Map<String, Long> startTimes = new HashMap<String, Long>();
+  protected final Map<String, Long> endTimes = new HashMap<String, Long>();
+
+  static final private Log LOG = LogFactory.getLog(PerfLogger.class.getName());
+  protected static final ThreadLocal<PerfLogger> perfLogger = new ThreadLocal<PerfLogger>();
+
+
+  public PerfLogger() {
+    // Use getPerfLogger to get an instance of PerfLogger
+  }
+
+  public static PerfLogger getPerfLogger(HiveConf conf, boolean resetPerfLogger) {
+    PerfLogger result = perfLogger.get();
+    if (resetPerfLogger || result == null) {
+      if (conf == null) {
+        result = new PerfLogger();
+      } else {
+        try {
+          result = (PerfLogger) ReflectionUtils.newInstance(conf.getClassByName(
+            conf.getVar(HiveConf.ConfVars.HIVE_PERF_LOGGER)), conf);
+        } catch (ClassNotFoundException e) {
+          LOG.error("Performance Logger Class not found:" + e.getMessage());
+          result = new PerfLogger();
+        }
+      }
+      perfLogger.set(result);
+    }
+    return result;
+  }
+
+  /**
+   * Call this function when you start to measure time spent by a piece of code.
+   * @param callerName the logging object to be used.
+   * @param method method or ID that identifies this perf log element.
+   */
+  public void PerfLogBegin(String callerName, String method) {
+    long startTime = System.currentTimeMillis();
+    LOG.info("<PERFLOG method=" + method + " from=" + callerName + ">");
+    startTimes.put(method, new Long(startTime));
+  }
+  /**
+   * Call this function in correspondence of PerfLogBegin to mark the end of the measurement.
+   * @param callerName
+   * @param method
+   * @return long duration  the difference between now and startTime, or -1 if startTime is null
+   */
+  public long PerfLogEnd(String callerName, String method) {
+    return PerfLogEnd(callerName, method, null);
+  }
+
+  /**
+   * Call this function in correspondence of PerfLogBegin to mark the end of the measurement.
+   * @param callerName
+   * @param method
+   * @return long duration  the difference between now and startTime, or -1 if startTime is null
+   */
+  public long PerfLogEnd(String callerName, String method, String additionalInfo) {
+    Long startTime = startTimes.get(method);
+    long endTime = System.currentTimeMillis();
+    long duration = -1;
+
+    endTimes.put(method, new Long(endTime));
+
+    StringBuilder sb = new StringBuilder("</PERFLOG method=").append(method);
+    if (startTime != null) {
+      sb.append(" start=").append(startTime);
+    }
+    sb.append(" end=").append(endTime);
+    if (startTime != null) {
+      duration = endTime - startTime.longValue();
+      sb.append(" duration=").append(duration);
+    }
+    sb.append(" from=").append(callerName);
+    if (additionalInfo != null) {
+      sb.append(" ").append(additionalInfo);
+    }
+    sb.append(">");
+    LOG.info(sb);
+
+    return duration;
+  }
+
+  public Long getStartTime(String method) {
+    long startTime = 0L;
+
+    if (startTimes.containsKey(method)) {
+      startTime = startTimes.get(method);
+    }
+    return startTime;
+  }
+
+  public Long getEndTime(String method) {
+    long endTime = 0L;
+
+    if (endTimes.containsKey(method)) {
+      endTime = endTimes.get(method);
+    }
+    return endTime;
+  }
+
+  public boolean startTimeHasMethod(String method) {
+    return startTimes.containsKey(method);
+  }
+
+  public boolean endTimeHasMethod(String method) {
+    return endTimes.containsKey(method);
+  }
+
+  public Long getDuration(String method) {
+    long duration = 0;
+    if (startTimes.containsKey(method) && endTimes.containsKey(method)) {
+      duration = endTimes.get(method) - startTimes.get(method);
+    }
+    return duration;
+  }
+
+}
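
As a reference for readers of the new class (an illustrative sketch only, not part of the commit), a caller obtains the thread-local instance via getPerfLogger(conf, resetPerfLogger) and brackets the code to be measured with PerfLogBegin/PerfLogEnd, keyed by one of the constants above or any other string:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.log.PerfLogger;

    // hypothetical caller, for illustration only
    public class PerfLoggerExample {
      private static final String CLASS_NAME = PerfLoggerExample.class.getName();

      public void timedWork(HiveConf conf) {
        // false = reuse the existing thread-local logger if one exists
        PerfLogger perfLogger = PerfLogger.getPerfLogger(conf, false);
        perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
        try {
          // ... code being measured ...
        } finally {
          // logs the </PERFLOG ...> line and returns the duration in milliseconds
          long durationMs = perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE);
        }
      }
    }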

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
index 892aef4..56276b6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.datanucleus.exceptions.NucleusException;
 
 @InterfaceAudience.Private
@@ -41,6 +42,17 @@ import org.datanucleus.exceptions.NucleusException;
 public class RetryingHMSHandler implements InvocationHandler {
 
   private static final Log LOG = LogFactory.getLog(RetryingHMSHandler.class);
+  private static final String CLASS_NAME = RetryingHMSHandler.class.getName();
+
+  private static class Result {
+    private final Object result;
+    private final int numRetries;
+
+    public Result(Object result, int numRetries) {
+      this.result = result;
+      this.numRetries = numRetries;
+    }
+  }
 
   private final IHMSHandler baseHandler;
   private final MetaStoreInit.MetaStoreInitData metaStoreInitData =
@@ -78,6 +90,25 @@ public class RetryingHMSHandler implements InvocationHandler {
 
   @Override
   public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable {
+    int retryCount = -1;
+    int threadId = HiveMetaStore.HMSHandler.get();
+    boolean error = true;
+    PerfLogger perfLogger = PerfLogger.getPerfLogger(origConf, false);
+    perfLogger.PerfLogBegin(CLASS_NAME, method.getName());
+    try {
+      Result result = invokeInternal(proxy, method, args);
+      retryCount = result.numRetries;
+      error = false;
+      return result.result;
+    } finally {
+      StringBuffer additionalInfo = new StringBuffer();
+      additionalInfo.append("threadId=").append(threadId).append(" retryCount=").append(retryCount)
+        .append(" error=").append(error);
+      perfLogger.PerfLogEnd(CLASS_NAME, method.getName(), additionalInfo.toString());
+    }
+  }
+
+  public Result invokeInternal(final Object proxy, final Method method, final Object[] args) throws Throwable {
 
     boolean gotNewConnectUrl = false;
     boolean reloadConf = HiveConf.getBoolVar(origConf,
@@ -106,7 +137,7 @@ public class RetryingHMSHandler implements InvocationHandler {
         Deadline.startTimer(method.getName());
         Object object = method.invoke(baseHandler, args);
         Deadline.stopTimer();
-        return object;
+        return new Result(object, retryCount);
 
       } catch (javax.jdo.JDOException e) {
         caughtException = e;
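
With this wrapper in place, every HMSHandler method call is bracketed by a pair of PERFLOG lines. Given the PerfLogBegin/PerfLogEnd formatting in the new common PerfLogger and the additionalInfo string built above, the metastore log output would look roughly like the following (method name and values are illustrative):

    <PERFLOG method=get_table from=org.apache.hadoop.hive.metastore.RetryingHMSHandler>
    </PERFLOG method=get_table start=1442876400000 end=1442876400042 duration=42 from=org.apache.hadoop.hive.metastore.RetryingHMSHandler threadId=12 retryCount=0 error=false>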

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 4030075..43159c6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -364,7 +364,7 @@ public class Driver implements CommandProcessor {
    * @return 0 for ok
    */
   public int compile(String command, boolean resetTaskIds) {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
 
     //holder for parent command type/string when executing reentrant queries
@@ -953,7 +953,7 @@ public class Driver implements CommandProcessor {
    * @param startTxnImplicitly in AC=false, the 1st DML starts a txn
    **/
   private int acquireLocksAndOpenTxn(boolean startTxnImplicitly) {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS);
 
     SessionState ss = SessionState.get();
@@ -1039,7 +1039,7 @@ public class Driver implements CommandProcessor {
    **/
   private void releaseLocksAndCommitOrRollback(List<HiveLock> hiveLocks, boolean commit)
       throws LockException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RELEASE_LOCKS);
 
     SessionState ss = SessionState.get();
@@ -1194,7 +1194,7 @@ public class Driver implements CommandProcessor {
     }
 
     // Reset the perf logger
-    PerfLogger perfLogger = PerfLogger.getPerfLogger(true);
+    PerfLogger perfLogger = SessionState.getPerfLogger(true);
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_RUN);
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.TIME_TO_SUBMIT);
 
@@ -1282,7 +1282,6 @@ public class Driver implements CommandProcessor {
     }
 
     perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DRIVER_RUN);
-    perfLogger.close(LOG, plan);
 
     // Take all the driver run hooks and post-execute them.
     try {
@@ -1406,7 +1405,7 @@ public class Driver implements CommandProcessor {
   }
 
   public int execute() throws CommandNeedRetryException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_EXECUTE);
     boolean noName = StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
     int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index a9159a5..02d61eb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -76,7 +77,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implem
   private static final long serialVersionUID = 1L;
   private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName());
   private static final String CLASS_NAME = MapJoinOperator.class.getName();
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private transient String cacheKey;
   private transient ObjectCache cache;

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
index aa8808a..af368eb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.SparkBucketMapJoinContext;
 import org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 public class SparkHashTableSinkOperator
@@ -48,7 +49,7 @@ public class SparkHashTableSinkOperator
   private static final int MIN_REPLICATION = 10;
   private static final long serialVersionUID = 1L;
   private final String CLASS_NAME = this.getClass().getName();
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
   protected static final Log LOG = LogFactory.getLog(SparkHashTableSinkOperator.class.getName());
 
   private final HashTableSinkOperator htsOperator;

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index ca86301..bcf85a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -82,7 +82,6 @@ import java.util.zip.DeflaterOutputStream;
 import java.util.zip.InflaterInputStream;
 
 import org.antlr.runtime.CommonToken;
-import org.apache.calcite.util.ChunkList;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.WordUtils;
@@ -936,7 +935,7 @@ public final class Utilities {
   }
 
   private static void serializePlan(Object plan, OutputStream out, Configuration conf, boolean cloningPlan) {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SERIALIZE_PLAN);
     String serializationType = conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
     LOG.info("Serializing " + plan.getClass().getSimpleName() + " via " + serializationType);
@@ -962,7 +961,7 @@ public final class Utilities {
   }
 
   private static <T> T deserializePlan(InputStream in, Class<T> planClass, Configuration conf, boolean cloningPlan) {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DESERIALIZE_PLAN);
     T plan;
     String serializationType = conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
@@ -997,7 +996,7 @@ public final class Utilities {
    */
   public static MapredWork clonePlan(MapredWork plan) {
     // TODO: need proper clone. Meanwhile, let's at least keep this horror in one place
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.CLONE_PLAN);
     ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
     Configuration conf = new HiveConf();
@@ -1014,7 +1013,7 @@ public final class Utilities {
    * @return The clone.
    */
   public static BaseWork cloneBaseWork(BaseWork plan) {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.CLONE_PLAN);
     ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
     Configuration conf = new HiveConf();
@@ -2530,7 +2529,7 @@ public final class Utilities {
    */
   public static ContentSummary getInputSummary(final Context ctx, MapWork work, PathFilter filter)
       throws IOException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.INPUT_SUMMARY);
 
     long[] summary = {0, 0, 0};

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
index daf9698..9906118 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
@@ -39,7 +40,7 @@ import com.google.common.base.Preconditions;
 public class SparkPlan {
   private static final String CLASS_NAME = SparkPlan.class.getName();
   private static final Log LOG = LogFactory.getLog(SparkPlan.class);
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private final Set<SparkTran> rootTrans = new HashSet<SparkTran>();
   private final Set<SparkTran> leafTrans = new HashSet<SparkTran>();

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
index 762ce7d..4c3ee4b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileOutputFormat;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileWork;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.Partitioner;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -61,7 +62,7 @@ import org.apache.spark.api.java.JavaSparkContext;
 @SuppressWarnings("rawtypes")
 public class SparkPlanGenerator {
   private static final String CLASS_NAME = SparkPlanGenerator.class.getName();
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final Log LOG = LogFactory.getLog(SparkPlanGenerator.class);
 
   private JavaSparkContext sc;

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
index 97b3471..3d37753 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
@@ -22,6 +22,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
@@ -35,7 +36,7 @@ import java.util.Iterator;
 
 public abstract class SparkRecordHandler {
   protected static final String CLASS_NAME = SparkRecordHandler.class.getName();
-  protected final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  protected final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final Log LOG = LogFactory.getLog(SparkRecordHandler.class);
 
   // used to log memory usage periodically

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index a36dc6e..eac812f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -80,7 +80,7 @@ public class SparkTask extends Task<SparkWork> {
   private static final String CLASS_NAME = SparkTask.class.getName();
   private static final Log LOG = LogFactory.getLog(CLASS_NAME);
   private static final LogHelper console = new LogHelper(LOG);
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final long serialVersionUID = 1L;
   private SparkCounters sparkCounters;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
index 3fceeb0..6fc20c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
@@ -38,7 +38,7 @@ abstract class SparkJobMonitor {
   protected static final String CLASS_NAME = SparkJobMonitor.class.getName();
   protected static final Log LOG = LogFactory.getLog(CLASS_NAME);
   protected static SessionState.LogHelper console = new SessionState.LogHelper(LOG);
-  protected final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  protected final PerfLogger perfLogger = SessionState.getPerfLogger();
   protected final int checkInterval = 1000;
   protected final long monitorTimeoutInteval;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
index c563d9d..87fded1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
 import org.apache.hadoop.hive.ql.plan.MapWork;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.tez.mapreduce.processor.MRTaskReporter;
@@ -64,7 +65,7 @@ public abstract class RecordProcessor  {
   protected boolean isLogTraceEnabled = false;
   protected MRTaskReporter reporter;
 
-  protected PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  protected PerfLogger perfLogger = SessionState.getPerfLogger();
   protected String CLASS_NAME = RecordProcessor.class.getName();
 
   public RecordProcessor(JobConf jConf, ProcessorContext processorContext) {

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
index d649672..91ba2bb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
@@ -51,7 +51,6 @@ import org.apache.tez.runtime.api.LogicalInput;
 import org.apache.tez.runtime.api.LogicalOutput;
 import org.apache.tez.runtime.api.ProcessorContext;
 import org.apache.tez.runtime.api.Reader;
-import org.apache.tez.runtime.library.api.KeyValuesReader;
 
 /**
  * Process input from tez LogicalInput and write output - for a map plan

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
index 89f7572..1f2f9f9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterF
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -115,7 +116,7 @@ public class ReduceRecordSource implements RecordSource {
 
   private ObjectInspector valueObjectInspector;
 
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private Iterable<Object> valueWritables;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
index 1e1603b..754c332 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
@@ -61,8 +61,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import jline.TerminalFactory;
-
 /**
  * TezJobMonitor keeps track of a tez job while it's being executed. It will
  * print status to the console and retrieve final status of the job after
@@ -100,7 +98,7 @@ public class TezJobMonitor {
   private String separator;
 
   private transient LogHelper console;
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private final int checkInterval = 200;
   private final int maxRetryInterval = 2500;
   private final int printInterval = 3000;

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
index 39f9db6..fad79f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.util.StringUtils;
@@ -54,7 +55,7 @@ public class TezProcessor extends AbstractLogicalIOProcessor {
   protected JobConf jobConf;
 
   private static final String CLASS_NAME = TezProcessor.class.getName();
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   protected ProcessorContext processorContext;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
index 4a1a712..2d740ed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
@@ -77,7 +77,7 @@ import org.json.JSONObject;
 public class TezTask extends Task<TezWork> {
 
   private static final String CLASS_NAME = TezTask.class.getName();
-  private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
+  private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private TezCounters counters;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index 11740d1..53bc1fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -25,11 +25,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
@@ -39,11 +36,9 @@ import java.util.concurrent.Future;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -52,19 +47,18 @@ import org.apache.hadoop.hive.ql.parse.SplitSample;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.HadoopShims.CombineFileInputFormatShim;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure.InputSplitShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 
 
@@ -462,7 +456,7 @@ public class CombineHiveInputFormat<K extends WritableComparable, V extends Writ
    */
   @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
     init(job);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index fd16b35..1ac1669 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.spark.SparkDynamicPartitionPruner;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -46,10 +45,10 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.MapWork;
-import org.apache.hadoop.hive.ql.plan.MergeJoinWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
@@ -351,7 +350,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
   }
 
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
     init(job);
     Path[] dirs = getInputPaths(job);

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
deleted file mode 100644
index 20ca195..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.log;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.session.SessionState;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * PerfLogger.
- *
- * Can be used to measure and log the time spent by a piece of code.
- */
-public class PerfLogger {
-  public static final String ACQUIRE_READ_WRITE_LOCKS = "acquireReadWriteLocks";
-  public static final String COMPILE = "compile";
-  public static final String PARSE = "parse";
-  public static final String ANALYZE = "semanticAnalyze";
-  public static final String DO_AUTHORIZATION = "doAuthorization";
-  public static final String DRIVER_EXECUTE = "Driver.execute";
-  public static final String INPUT_SUMMARY = "getInputSummary";
-  public static final String GET_SPLITS = "getSplits";
-  public static final String RUN_TASKS = "runTasks";
-  public static final String SERIALIZE_PLAN = "serializePlan";
-  public static final String DESERIALIZE_PLAN = "deserializePlan";
-  public static final String CLONE_PLAN = "clonePlan";
-  public static final String TASK = "task.";
-  public static final String RELEASE_LOCKS = "releaseLocks";
-  public static final String PRUNE_LISTING = "prune-listing";
-  public static final String PARTITION_RETRIEVING = "partition-retrieving";
-  public static final String PRE_HOOK = "PreHook.";
-  public static final String POST_HOOK = "PostHook.";
-  public static final String FAILURE_HOOK = "FailureHook.";
-  public static final String DRIVER_RUN = "Driver.run";
-  public static final String TIME_TO_SUBMIT = "TimeToSubmit";
-  public static final String TEZ_SUBMIT_TO_RUNNING = "TezSubmitToRunningDag";
-  public static final String TEZ_BUILD_DAG = "TezBuildDag";
-  public static final String TEZ_SUBMIT_DAG = "TezSubmitDag";
-  public static final String TEZ_RUN_DAG = "TezRunDag";
-  public static final String TEZ_CREATE_VERTEX = "TezCreateVertex.";
-  public static final String TEZ_RUN_VERTEX = "TezRunVertex.";
-  public static final String TEZ_INITIALIZE_PROCESSOR = "TezInitializeProcessor";
-  public static final String TEZ_RUN_PROCESSOR = "TezRunProcessor";
-  public static final String TEZ_INIT_OPERATORS = "TezInitializeOperators";
-  public static final String LOAD_HASHTABLE = "LoadHashtable";
-
-  public static final String SPARK_SUBMIT_TO_RUNNING = "SparkSubmitToRunning";
-  public static final String SPARK_BUILD_PLAN = "SparkBuildPlan";
-  public static final String SPARK_BUILD_RDD_GRAPH = "SparkBuildRDDGraph";
-  public static final String SPARK_SUBMIT_JOB = "SparkSubmitJob";
-  public static final String SPARK_RUN_JOB = "SparkRunJob";
-  public static final String SPARK_CREATE_TRAN = "SparkCreateTran.";
-  public static final String SPARK_RUN_STAGE = "SparkRunStage.";
-  public static final String SPARK_INIT_OPERATORS = "SparkInitializeOperators";
-  public static final String SPARK_GENERATE_TASK_TREE = "SparkGenerateTaskTree";
-  public static final String SPARK_OPTIMIZE_OPERATOR_TREE = "SparkOptimizeOperatorTree";
-  public static final String SPARK_OPTIMIZE_TASK_TREE = "SparkOptimizeTaskTree";
-  public static final String SPARK_FLUSH_HASHTABLE = "SparkFlushHashTable.";
-
-  protected static final ThreadLocal<PerfLogger> perfLogger = new ThreadLocal<PerfLogger>();
-
-  protected final Map<String, Long> startTimes = new HashMap<String, Long>();
-  protected final Map<String, Long> endTimes = new HashMap<String, Long>();
-
-  static final private Log LOG = LogFactory.getLog(PerfLogger.class.getName());
-
-  public PerfLogger() {
-    // Use getPerfLogger to get an instance of PerfLogger
-  }
-
-  public static PerfLogger getPerfLogger() {
-    return getPerfLogger(false);
-  }
-
-  /**
-   * Call this function to get an instance of PerfLogger.
-   *
-   * Use resetPerfLogger to require a new instance.  Useful at the beginning of execution.
-   *
-   * @return Session perflogger if there's a sessionstate, otherwise return the thread local instance
-   */
-  public static PerfLogger getPerfLogger(boolean resetPerfLogger) {
-    if (SessionState.get() == null) {
-      if (perfLogger.get() == null || resetPerfLogger) {
-        perfLogger.set(new PerfLogger());
-      }
-      return perfLogger.get();
-    } else {
-      return SessionState.get().getPerfLogger(resetPerfLogger);
-    }
-  }
-
-  /**
-   * Call this function when you start to measure time spent by a piece of code.
-   * @param _log the logging object to be used.
-   * @param method method or ID that identifies this perf log element.
-   */
-  public void PerfLogBegin(String callerName, String method) {
-    long startTime = System.currentTimeMillis();
-    LOG.info("<PERFLOG method=" + method + " from=" + callerName + ">");
-    startTimes.put(method, new Long(startTime));
-  }
-
-  /**
-   * Call this function in correspondence of PerfLogBegin to mark the end of the measurement.
-   * @param _log
-   * @param method
-   * @return long duration  the difference between now and startTime, or -1 if startTime is null
-   */
-  public long PerfLogEnd(String callerName, String method) {
-    Long startTime = startTimes.get(method);
-    long endTime = System.currentTimeMillis();
-    long duration = -1;
-
-    endTimes.put(method, new Long(endTime));
-
-    StringBuilder sb = new StringBuilder("</PERFLOG method=").append(method);
-    if (startTime != null) {
-      sb.append(" start=").append(startTime);
-    }
-    sb.append(" end=").append(endTime);
-    if (startTime != null) {
-      duration = endTime - startTime.longValue();
-      sb.append(" duration=").append(duration);
-    }
-    sb.append(" from=").append(callerName).append(">");
-    LOG.info(sb);
-
-    return duration;
-  }
-
-  /**
-   * Call this function at the end of processing a query (any time after the last call to PerfLogEnd
-   * for a given query) to run any cleanup/final steps that need to be run
-   * @param _log
-   */
-  public void close(Log _log, QueryPlan queryPlan) {
-
-  }
-
-  public Long getStartTime(String method) {
-    long startTime = 0L;
-
-    if (startTimes.containsKey(method)) {
-      startTime = startTimes.get(method);
-    }
-    return startTime;
-  }
-
-  public Long getEndTime(String method) {
-    long endTime = 0L;
-
-    if (endTimes.containsKey(method)) {
-      endTime = endTimes.get(method);
-    }
-    return endTime;
-  }
-
-  public boolean startTimeHasMethod(String method) {
-    return startTimes.containsKey(method);
-  }
-
-  public boolean endTimeHasMethod(String method) {
-    return endTimes.containsKey(method);
-  }
-
-  public Long getDuration(String method) {
-    long duration = 0;
-    if (startTimes.containsKey(method) && endTimes.containsKey(method)) {
-      duration = endTimes.get(method) - startTimes.get(method);
-    }
-    return duration;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index d264559..8eab603 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
@@ -400,7 +401,7 @@ public class PartitionPruner implements Transform {
       // Now filter.
       List<Partition> partitions = new ArrayList<Partition>();
       boolean hasUnknownPartitions = false;
-      PerfLogger perfLogger = PerfLogger.getPerfLogger();
+      PerfLogger perfLogger = SessionState.getPerfLogger();
       if (!doEvalClientSide) {
         perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
         try {
@@ -432,7 +433,7 @@ public class PartitionPruner implements Transform {
   }
 
   private static Set<Partition> getAllPartitions(Table tab) throws HiveException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
     Set<Partition> result = Hive.get().getAllPartitionsOf(tab);
     perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
@@ -450,7 +451,7 @@ public class PartitionPruner implements Transform {
    */
   static private boolean pruneBySequentialScan(Table tab, List<Partition> partitions,
       ExprNodeGenericFuncDesc prunerExpr, HiveConf conf) throws HiveException, MetaException {
-    PerfLogger perfLogger = PerfLogger.getPerfLogger();
+    PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PRUNE_LISTING);
 
     List<String> partNames = Hive.get().getPartitionNames(

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 27a1d99..9ec7fd6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -87,6 +87,7 @@ import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SparkWork;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * SparkCompiler translates the operator plan into SparkTasks.
@@ -95,7 +96,7 @@ import org.apache.hadoop.hive.ql.plan.SparkWork;
  */
 public class SparkCompiler extends TaskCompiler {
   private static final String CLASS_NAME = SparkCompiler.class.getName();
-  private static final PerfLogger PERF_LOGGER = PerfLogger.getPerfLogger();
+  private static final PerfLogger PERF_LOGGER = SessionState.getPerfLogger();
   private static final Log LOGGER = LogFactory.getLog(SparkCompiler.class);
 
   public SparkCompiler() {

http://git-wip-us.apache.org/repos/asf/hive/blob/21861592/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 7ed8e5f..5f528167 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -104,6 +104,7 @@ public class SessionState {
   private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path";
   private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path";
   private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space";
+
   private final Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();
   private final Map<String, Map<String, ColumnStatisticsObj>> tempTableColStats =
       new HashMap<String, Map<String, ColumnStatisticsObj>>();
@@ -596,7 +597,7 @@ public class SessionState {
    * Create a given path if it doesn't exist.
    *
    * @param conf
-   * @param pathString
+   * @param path
    * @param permission
    * @param isLocal
    * @param isCleanUp
@@ -1523,25 +1524,37 @@ public class SessionState {
   }
 
   /**
-   * @param resetPerfLogger
    * @return  Tries to return an instance of the class whose name is configured in
    *          hive.exec.perf.logger, but if it can't it just returns an instance of
    *          the base PerfLogger class
+   *
+   */
+  public static PerfLogger getPerfLogger() {
+    return getPerfLogger(false);
+  }
 
+  /**
+   * @param resetPerfLogger
+   * @return  Tries to return an instance of the class whose name is configured in
+   *          hive.exec.perf.logger, but if it can't it just returns an instance of
+   *          the base PerfLogger class
+   *
    */
-  public PerfLogger getPerfLogger(boolean resetPerfLogger) {
-    if ((perfLogger == null) || resetPerfLogger) {
-      try {
-        perfLogger = (PerfLogger) ReflectionUtils.newInstance(conf.getClassByName(
-            conf.getVar(ConfVars.HIVE_PERF_LOGGER)), conf);
-      } catch (ClassNotFoundException e) {
-        LOG.error("Performance Logger Class not found:" + e.getMessage());
-        perfLogger = new PerfLogger();
-      }
+  public static PerfLogger getPerfLogger(boolean resetPerfLogger) {
+    SessionState ss = get();
+    if (ss == null) {
+      return PerfLogger.getPerfLogger(null, resetPerfLogger);
+    } else if (ss.perfLogger != null && !resetPerfLogger) {
+      return ss.perfLogger;
+    } else {
+      PerfLogger perfLogger = PerfLogger.getPerfLogger(ss.getConf(), resetPerfLogger);
+      ss.perfLogger = perfLogger;
+      return perfLogger;
     }
-    return perfLogger;
   }
 
+
+
   public TezSessionState getTezSession() {
     return tezSessionState;
   }
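
The net effect of the SessionState changes (sketched here for clarity, not part of the diff) is that the PerfLogger is now created from the session's HiveConf, cached on the SessionState when one exists, and reset explicitly at the start of a query, as Driver.run now does:

    // at the start of a query the logger is reset (Driver.run passes true)
    PerfLogger perfLogger = SessionState.getPerfLogger(true);
    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_RUN);

    // later lookups in the same session (or thread, if no session exists)
    // return the same instance, so begin/end pairs match up
    PerfLogger same = SessionState.getPerfLogger();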