Posted to commits@hive.apache.org by ha...@apache.org on 2015/10/28 16:45:57 UTC

[09/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
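
Every hunk in this patch applies the same mechanical substitution: the commons-logging Log/LogFactory pair is replaced by the slf4j Logger/LoggerFactory equivalent. A minimal before/after sketch of that substitution, using a hypothetical class name for illustration:

    // Before: commons-logging
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class Example {
      private static final Log LOG = LogFactory.getLog(Example.class);
    }

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);
    }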

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java
index c5d8aea..7fc3226 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java
@@ -21,8 +21,8 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.text.NumberFormat;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  * for HashTableSinkOperator.
  */
 public class MapJoinMemoryExhaustionHandler {
-  private static final Log LOG = LogFactory.getLog(MapJoinMemoryExhaustionHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapJoinMemoryExhaustionHandler.class);
 
   public final MemoryMXBean memoryMXBean;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index bed7d63..5cbf764 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -29,11 +29,10 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
-import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileStatus;
@@ -84,15 +83,12 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Partitioner;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.core.Appender;
 import org.apache.logging.log4j.core.appender.FileAppender;
 import org.apache.logging.log4j.core.appender.RollingFileAppender;
@@ -115,7 +111,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
   public static MemoryMXBean memoryMXBean;
   protected HadoopJobExecHelper jobExecHelper;
 
-  protected static transient final Log LOG = LogFactory.getLog(ExecDriver.class);
+  protected static transient final Logger LOG = LoggerFactory.getLogger(ExecDriver.class);
 
   private RunningJob rj;
 
@@ -473,7 +469,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
           jobID = rj.getID().toString();
         }
       } catch (Exception e) {
-	LOG.warn(e);
+	LOG.warn("Failed while cleaning up", e);
       } finally {
 	HadoopJobExecHelper.runningJobs.remove(rj);
       }
@@ -695,7 +691,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     if (noLog) {
       // If started from main(), and noLog is on, we should not output
       // any logs. To turn the log on, please set -Dtest.silent=false
-      Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+      org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
       NullAppender appender = NullAppender.createNullAppender();
       appender.addToLogger(logger.getName(), Level.ERROR);
       appender.start();
@@ -703,7 +699,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       setupChildLog4j(conf);
     }
 
-    Log LOG = LogFactory.getLog(ExecDriver.class.getName());
+    Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName());
     LogHelper console = new LogHelper(LOG, isSilent);
 
     if (planFileName == null) {

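The warn() change in the cleanup block above is forced by the API difference rather than taste: commons-logging declares warn(Object), so LOG.warn(e) compiled, but slf4j has no single-Throwable overload; the Throwable must follow a message string for its stack trace to be logged. A minimal sketch of the idiom, with a hypothetical cleanup call standing in for the job logic:

    try {
      cleanUpRunningJob();  // hypothetical method, for illustration only
    } catch (Exception e) {
      // slf4j: message first, Throwable last, so the stack trace is emitted
      LOG.warn("Failed while cleaning up", e);
    }

The same import swap explains the fully qualified org.apache.logging.log4j.Logger in main(): once org.slf4j.Logger is imported, the simple name Logger resolves to the slf4j interface, so the log4j2 root logger has to be spelled out.
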
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 1196ae8..23497a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.MapOperator;
@@ -58,28 +58,22 @@ import org.apache.hadoop.util.StringUtils;
  */
 public class ExecMapper extends MapReduceBase implements Mapper {
 
-  private static final String PLAN_KEY = "__MAP_PLAN__";
   private MapOperator mo;
   private OutputCollector oc;
   private JobConf jc;
   private boolean abort = false;
   private Reporter rp;
-  public static final Log l4j = LogFactory.getLog(ExecMapper.class);
+  public static final Logger l4j = LoggerFactory.getLogger(ExecMapper.class);
   private static boolean done;
 
   private MapredLocalWork localWork = null;
-  private boolean isLogInfoEnabled = false;
-
   private ExecMapperContext execContext = null;
 
   @Override
   public void configure(JobConf job) {
     execContext = new ExecMapperContext(job);
     // Allocate the bean at the beginning -
-
-    isLogInfoEnabled = l4j.isInfoEnabled();
-
     try {
       l4j.info("conf classpath = "
           + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs()));
       l4j.info("thread classpath = "
@@ -168,7 +162,7 @@ public class ExecMapper extends MapReduceBase implements Mapper {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        l4j.fatal(StringUtils.stringifyException(e));
+        l4j.error(StringUtils.stringifyException(e));
         throw new RuntimeException(e);
       }
     }
@@ -237,12 +231,10 @@ public class ExecMapper extends MapReduceBase implements Mapper {
    */
   public static class ReportStats implements Operator.OperatorFunc {
     private final Reporter rp;
-    private final Configuration conf;
     private final String groupName;
 
     public ReportStats(Reporter rp, Configuration conf) {
       this.rp = rp;
-      this.conf = conf;
       this.groupName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVECOUNTERGROUP);
     }
 

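ExecMapper also drops its cached isLogInfoEnabled flag. Under slf4j such guards buy little, because parameterized messages defer string formatting until the level is known to be enabled; a sketch of the two styles, assuming a hypothetical classpath variable:

    // Eager concatenation needs a guard to avoid building the string
    // when INFO is disabled:
    if (l4j.isInfoEnabled()) {
      l4j.info("conf classpath = " + classpath);
    }

    // slf4j placeholder form: the message is formatted only if INFO is enabled
    l4j.info("conf classpath = {}", classpath);

The fatal-to-error change in the same file follows from slf4j itself: slf4j defines no FATAL level, so the closest mapping for l4j.fatal(...) is l4j.error(...), a substitution repeated in ExecReducer and the Spark record handlers below.
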
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
index fc5abfe..8f397fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.io.IOContext;
@@ -29,8 +28,6 @@ import org.apache.hadoop.mapred.JobConf;
 
 public class ExecMapperContext {
 
-  public static final Log l4j = ExecMapper.l4j;
-
   // lastInputPath should be changed by the root of the operator tree ExecMapper.map()
   // but kept unchanged throughout the operator tree for one row
   private Path lastInputPath = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
index 6b13ea5..1dffff2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
@@ -27,8 +27,8 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -63,7 +63,7 @@ import org.apache.hadoop.util.StringUtils;
  */
 public class ExecReducer extends MapReduceBase implements Reducer {
 
-  private static final Log LOG = LogFactory.getLog("ExecReducer");
+  private static final Logger LOG = LoggerFactory.getLogger("ExecReducer");
   private static final boolean isInfoEnabled = LOG.isInfoEnabled();
   private static final boolean isTraceEnabled = LOG.isTraceEnabled();
   private static final String PLAN_KEY = "__REDUCE_PLAN__";
@@ -253,7 +253,7 @@ public class ExecReducer extends MapReduceBase implements Reducer {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        LOG.fatal(StringUtils.stringifyException(e));
+        LOG.error(StringUtils.stringifyException(e));
         throw new RuntimeException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
index abf38e4..1070384 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,7 +52,7 @@ import org.apache.hadoop.mapred.JobConf;
  */
 public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader {
 
-  private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(MapJoinOperator.class.getName());
 
   private ExecMapperContext context;
   private Configuration hconf;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index a5c1463..bfe21db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -33,8 +33,8 @@ import java.util.Properties;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -83,12 +83,12 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
   private final Map<String, FetchOperator> fetchOperators = new HashMap<String, FetchOperator>();
   protected HadoopJobExecHelper jobExecHelper;
   private JobConf job;
-  public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class);
+  public static transient final Logger l4j = LoggerFactory.getLogger(MapredLocalTask.class);
   static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE";
   static final String HADOOP_OPTS_KEY = "HADOOP_OPTS";
   static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"};
   public static MemoryMXBean memoryMXBean;
-  private static final Log LOG = LogFactory.getLog(MapredLocalTask.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapredLocalTask.class);
 
   // not sure we need this exec context; but all the operators in the work
  // will pass this context through

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java
index bf4ae8d..7baf9b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java
@@ -24,8 +24,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
  */
 public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache {
 
-  private static final Log LOG = LogFactory.getLog(ObjectCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class.getName());
   private static final boolean isInfoEnabled = LOG.isInfoEnabled();
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java
index f582c39..a522493 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java
@@ -23,7 +23,7 @@ import java.io.InputStream;
 import java.net.URL;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.JobConf;
 
@@ -44,7 +44,7 @@ public final class Throttle {
   /**
    * Fetch http://tracker.om:/gc.jsp?threshold=period.
    */
-  public static void checkJobTracker(JobConf conf, Log LOG) {
+  public static void checkJobTracker(JobConf conf, Logger LOG) {
 
     try {
       byte[] buffer = new byte[1024];

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java
index 77c7ead..51acae0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java
@@ -24,8 +24,8 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.debug.Utils;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -46,7 +46,7 @@ import com.google.common.annotations.VisibleForTesting;
  * and there's very little in common left save for quadratic probing (and that with some changes).
  */
 public final class BytesBytesMultiHashMap {
-  public static final Log LOG = LogFactory.getLog(BytesBytesMultiHashMap.class);
+  public static final Logger LOG = LoggerFactory.getLogger(BytesBytesMultiHashMap.class);
 
   /*
    * This hashtable stores "references" in an array of longs;  index in the array is hash of

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
index 70f5605..c491df3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
@@ -28,8 +28,8 @@ import java.util.List;
 import java.util.ListIterator;
 import java.util.NoSuchElementException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -45,7 +45,7 @@ public class FlatRowContainer extends AbstractCollection<Object>
   private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];
   private static final int UNKNOWN = Integer.MAX_VALUE;
 
-  private static Log LOG = LogFactory.getLog(FlatRowContainer.class);
+  private static Logger LOG = LoggerFactory.getLogger(FlatRowContainer.class);
 
   /**
    * In lazy mode, 0s element contains context for deserialization and all the other

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java
index 3852380..2ca5c00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java
@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
@@ -52,7 +52,7 @@ import org.apache.hadoop.io.Writable;
 
 public class HashMapWrapper extends AbstractMapJoinTableContainer implements Serializable {
   private static final long serialVersionUID = 1L;
-  protected static final Log LOG = LogFactory.getLog(HashMapWrapper.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(HashMapWrapper.class);
 
   // default threshold for using main memory based HashMap
   private static final int THRESHOLD = 1000000;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
index 625ba39..72b2cb0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
@@ -29,8 +29,8 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
@@ -72,7 +72,7 @@ import com.esotericsoftware.kryo.Kryo;
  */
 public class HybridHashTableContainer
       implements MapJoinTableContainer, MapJoinTableContainerDirectAccess {
-  private static final Log LOG = LogFactory.getLog(HybridHashTableContainer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HybridHashTableContainer.class);
 
   private final HashPartition[] hashPartitions; // an array of partitions holding the triplets
   private int totalInMemRowCount = 0;           // total number of small table rows in memory

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java
index d1bea48..d403c58 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java
@@ -21,8 +21,8 @@ import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -41,7 +41,7 @@ import java.io.IOException;
  */
 @SuppressWarnings("unchecked")
 public class KeyValueContainer {
-  private static final Log LOG = LogFactory.getLog(KeyValueContainer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KeyValueContainer.class);
 
   @VisibleForTesting
   static final int IN_MEMORY_NUM_ROWS = 1024;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
index 1305f75..58d6a9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
@@ -68,7 +68,7 @@ import org.apache.hadoop.io.Writable;
  */
 public class MapJoinBytesTableContainer
          implements MapJoinTableContainer, MapJoinTableContainerDirectAccess {
-  private static final Log LOG = LogFactory.getLog(MapJoinTableContainer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapJoinTableContainer.class);
 
   private final BytesBytesMultiHashMap hashMap;
   /** The OI used to deserialize values. We never deserialize keys. */

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
index 02f25e7..86cc9bd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
@@ -25,8 +25,8 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapperBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
index 7d7ce1d..6d391a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
@@ -22,8 +22,8 @@ import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -41,7 +41,7 @@ import java.io.IOException;
  */
 @SuppressWarnings("unchecked")
 public class ObjectContainer<ROW> {
-  private static final Log LOG = LogFactory.getLog(ObjectContainer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectContainer.class);
 
   @VisibleForTesting
   static final int IN_MEMORY_NUM_ROWS = 1024;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
index 68dc482..c2d0d68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -74,7 +74,7 @@ import org.apache.hive.common.util.ReflectionUtil;
 public class RowContainer<ROW extends List<Object>>
   implements AbstractRowContainer<ROW>, AbstractRowContainer.RowIterator<ROW> {
 
-  protected static Log LOG = LogFactory.getLog(RowContainer.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(RowContainer.class);
 
   // max # of rows can be put into one block
   private static final int BLOCKSIZE = 25000;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java
index 10e3497..39f9d40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java
@@ -22,8 +22,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -54,7 +54,7 @@ import org.apache.hadoop.mapred.JobConf;
  */
 public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader {
 
-  private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName());
 
   private ExecMapperContext context;
   private Configuration hconf;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
index 9db2e8d..c3e820d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
@@ -22,8 +22,8 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -44,7 +44,7 @@ import com.google.common.base.Preconditions;
  */
 @SuppressWarnings("unchecked")
 class HiveKVResultCache {
-  private static final Log LOG = LogFactory.getLog(HiveKVResultCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveKVResultCache.class);
 
   @VisibleForTesting
   static final int IN_MEMORY_NUM_ROWS = 1024;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index e12a97d..259c12f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -27,8 +27,8 @@ import java.util.Properties;
 import java.util.Set;
 
 import org.apache.commons.compress.utils.CharsetNames;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -43,7 +43,7 @@ import com.google.common.base.Strings;
 import com.google.common.collect.Sets;
 
 public class HiveSparkClientFactory {
-  protected static final transient Log LOG = LogFactory.getLog(HiveSparkClientFactory.class);
+  protected static final transient Logger LOG = LoggerFactory.getLogger(HiveSparkClientFactory.class);
 
   private static final String SPARK_DEFAULT_CONF_FILE = "spark-defaults.conf";
   private static final String SPARK_DEFAULT_MASTER = "yarn-cluster";

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
index f1d7368..fd7109a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
@@ -24,8 +24,8 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 
 import org.apache.commons.io.output.ByteArrayOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.mapred.JobConf;
 
@@ -33,7 +33,7 @@ import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 
 public class KryoSerializer {
-  private static final Log LOG = LogFactory.getLog(KryoSerializer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KryoSerializer.class);
 
   public static byte[] serialize(Object object) {
     ByteArrayOutputStream stream = new ByteArrayOutputStream();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
index 19d3fee..0c0fe95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -57,8 +57,8 @@ public class LocalHiveSparkClient implements HiveSparkClient {
   private static final long serialVersionUID = 1L;
 
   private static final String MR_JAR_PROPERTY = "tmpjars";
-  protected static final transient Log LOG = LogFactory
-      .getLog(LocalHiveSparkClient.class);
+  protected static final transient Logger LOG = LoggerFactory
+      .getLogger(LocalHiveSparkClient.class);
 
   private static final Splitter CSV_SPLITTER = Splitter.on(",").omitEmptyStrings();
 
@@ -71,13 +71,13 @@ public class LocalHiveSparkClient implements HiveSparkClient {
     return client;
   }
 
-  private JavaSparkContext sc;
+  private final JavaSparkContext sc;
 
-  private List<String> localJars = new ArrayList<String>();
+  private final List<String> localJars = new ArrayList<String>();
 
-  private List<String> localFiles = new ArrayList<String>();
+  private final List<String> localFiles = new ArrayList<String>();
 
-  private JobMetricsListener jobMetricsListener;
+  private final JobMetricsListener jobMetricsListener;
 
   private LocalHiveSparkClient(SparkConf sparkConf) {
     sc = new JavaSparkContext(sparkConf);

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index 2e8d1d3..86b9d67 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -33,8 +33,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -71,7 +71,7 @@ public class RemoteHiveSparkClient implements HiveSparkClient {
   private static final long serialVersionUID = 1L;
 
   private static final String MR_JAR_PROPERTY = "tmpjars";
-  private static final transient Log LOG = LogFactory.getLog(RemoteHiveSparkClient.class);
+  private static final transient Logger LOG = LoggerFactory.getLogger(RemoteHiveSparkClient.class);
   private static final long MAX_PREWARM_TIME = 30000; // 30s
   private static final transient Splitter CSV_SPLITTER = Splitter.on(",").omitEmptyStrings();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java
index 1992d16..a838eae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java
@@ -19,15 +19,15 @@ package org.apache.hadoop.hive.ql.exec.spark;
 
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
 
 public class SmallTableCache {
-  private static final Log LOG = LogFactory.getLog(SmallTableCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(SmallTableCache.class.getName());
 
   private static final ConcurrentHashMap<Path, MapJoinTableContainer>
     tableContainerMap = new ConcurrentHashMap<Path, MapJoinTableContainer>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java
index 52913e0..b70be01 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java
@@ -32,8 +32,8 @@ import java.util.Set;
 
 import com.clearspring.analytics.util.Preconditions;
 import javolution.testing.AssertionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -62,7 +62,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * The spark version of DynamicPartitionPruner.
  */
 public class SparkDynamicPartitionPruner {
-  private static final Log LOG = LogFactory.getLog(SparkDynamicPartitionPruner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkDynamicPartitionPruner.class);
   private final Map<String, List<SourceInfo>> sourceInfoMap = new LinkedHashMap<String, List<SourceInfo>>();
   private final BytesWritable writable = new BytesWritable();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
index bbfa245..62be3f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.MapOperator;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -53,8 +53,7 @@ import org.apache.hadoop.mapred.Reporter;
  *
  */
 public class SparkMapRecordHandler extends SparkRecordHandler {
-  private static final Log LOG = LogFactory.getLog(SparkMapRecordHandler.class);
-  private static final String PLAN_KEY = "__MAP_PLAN__";
+  private static final Logger LOG = LoggerFactory.getLogger(SparkMapRecordHandler.class);
   private MapOperator mo;
   private MapredLocalWork localWork = null;
   private boolean isLogInfoEnabled = false;
@@ -143,7 +142,7 @@ public class SparkMapRecordHandler extends SparkRecordHandler {
         throw (OutOfMemoryError) e;
       } else {
         String msg = "Error processing row: " + e;
-        LOG.fatal(msg, e);
+        LOG.error(msg, e);
         throw new RuntimeException(msg, e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java
index fdc8452..4af372a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec.spark;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -46,7 +46,7 @@ import com.google.common.base.Preconditions;
 public class SparkMergeFileRecordHandler extends SparkRecordHandler {
 
   private static final String PLAN_KEY = "__MAP_PLAN__";
-  private static final Log LOG = LogFactory.getLog(SparkMergeFileRecordHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkMergeFileRecordHandler.class);
   private Operator<? extends OperatorDesc> op;
   private AbstractFileMergeOperator<? extends FileMergeDesc> mergeOp;
   private Object[] row;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
index 9906118..9a2ab51 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
@@ -26,8 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -39,7 +39,7 @@ import com.google.common.base.Preconditions;
 @SuppressWarnings("rawtypes")
 public class SparkPlan {
   private static final String CLASS_NAME = SparkPlan.class.getName();
-  private static final Log LOG = LogFactory.getLog(SparkPlan.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkPlan.class);
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private final Set<SparkTran> rootTrans = new HashSet<SparkTran>();
@@ -131,7 +131,7 @@ public class SparkPlan {
     }
     sparkPlan
       .append(" \n\t!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Spark Plan !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ");
-    LOG.info(sparkPlan);
+    LOG.info(sparkPlan.toString());
   }
 
   private void collectLeafTrans(SparkTran leaf, List<SparkTran> reduceTrans) {

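The added toString() call is another consequence of the facade switch: commons-logging's info(Object) accepted the StringBuilder directly, while slf4j's logging methods take a String message. Both slf4j-compatible forms, given the sparkPlan builder from the hunk above:

    LOG.info(sparkPlan.toString());   // explicit conversion, as this patch does
    LOG.info("{}", sparkPlan);        // or let the placeholder invoke toString() lazily
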
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
index 085ad9e..6951993 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper;
@@ -70,7 +70,7 @@ import com.google.common.base.Preconditions;
 public class SparkPlanGenerator {
   private static final String CLASS_NAME = SparkPlanGenerator.class.getName();
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
-  private static final Log LOG = LogFactory.getLog(SparkPlanGenerator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkPlanGenerator.class);
 
   private final JavaSparkContext sc;
   private final JobConf jobConf;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
index 3d37753..2421885 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -37,7 +37,7 @@ import java.util.Iterator;
 public abstract class SparkRecordHandler {
   protected static final String CLASS_NAME = SparkRecordHandler.class.getName();
   protected final PerfLogger perfLogger = SessionState.getPerfLogger();
-  private static final Log LOG = LogFactory.getLog(SparkRecordHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkRecordHandler.class);
 
   // used to log memory usage periodically
   protected final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
@@ -81,7 +81,7 @@ public abstract class SparkRecordHandler {
   public abstract <E> void processRow(Object key, Iterator<E> values) throws IOException;
 
   /**
-   * Log processed row number and used memory info.
+   * Logs the processed row number and used memory info.
    */
   protected void logMemoryInfo() {
     rowNumber++;
@@ -97,7 +97,7 @@ public abstract class SparkRecordHandler {
   public abstract boolean getDone();
 
   /**
-   * Log information to be logged at the end.
+   * Logs memory usage information at the end of processing.
    */
   protected void logCloseInfo() {
     long usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
index 4da02be..5fbefec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -70,7 +70,7 @@ import org.apache.hadoop.util.StringUtils;
  */
 public class SparkReduceRecordHandler extends SparkRecordHandler {
 
-  private static final Log LOG = LogFactory.getLog(SparkReduceRecordHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkReduceRecordHandler.class);
 
   // Input value serde needs to be an array to support different SerDe
   // for different tags
@@ -274,7 +274,7 @@ public class SparkReduceRecordHandler extends SparkRecordHandler {
         throw (OutOfMemoryError) e;
       } else {
         String msg = "Fatal error: " + e;
-        LOG.fatal(msg, e);
+        LOG.error(msg, e);
         throw new RuntimeException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index eac812f..336d490 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -28,8 +28,8 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -73,12 +73,12 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.spark.counter.SparkCounters;
 
 import com.google.common.collect.Lists;
 
 public class SparkTask extends Task<SparkWork> {
   private static final String CLASS_NAME = SparkTask.class.getName();
-  private static final Log LOG = LogFactory.getLog(CLASS_NAME);
+  private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   private static final LogHelper console = new LogHelper(LOG);
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 49e5f6c..f04e145 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec.spark.session;
 import java.io.IOException;
 import java.util.UUID;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -36,7 +36,7 @@ import org.apache.spark.util.Utils;
 import com.google.common.base.Preconditions;
 
 public class SparkSessionImpl implements SparkSession {
-  private static final Log LOG = LogFactory.getLog(SparkSession.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkSession.class);
 
   private HiveConf conf;
   private boolean isOpen;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java
index 616807c..75e5913 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java
@@ -24,8 +24,8 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.spark.HiveSparkClientFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -38,7 +38,7 @@ import org.apache.hive.spark.client.SparkClientFactory;
  *   - SparkSession is reused if the userName in new conf and user name in session conf match.
  */
 public class SparkSessionManagerImpl implements SparkSessionManager {
-  private static final Log LOG = LogFactory.getLog(SparkSessionManagerImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkSessionManagerImpl.class);
 
   private Set<SparkSession> createdSessions = Collections.synchronizedSet(new HashSet<SparkSession>());
   private volatile boolean inited = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
index 6fc20c7..0b6b15b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.spark.status;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -36,13 +36,13 @@ import java.util.concurrent.TimeUnit;
 abstract class SparkJobMonitor {
 
   protected static final String CLASS_NAME = SparkJobMonitor.class.getName();
-  protected static final Log LOG = LogFactory.getLog(CLASS_NAME);
+  protected static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   protected static SessionState.LogHelper console = new SessionState.LogHelper(LOG);
   protected final PerfLogger perfLogger = SessionState.getPerfLogger();
   protected final int checkInterval = 1000;
   protected final long monitorTimeoutInteval;
 
-  private Set<String> completed = new HashSet<String>();
+  private final Set<String> completed = new HashSet<String>();
   private final int printInterval = 3000;
   private long lastPrintTime;
 

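SparkJobMonitor looks its logger up by name rather than by class; slf4j exposes both overloads, so the existing CLASS_NAME constant keeps working unchanged. Several later hunks pass Foo.class.getName() to getLogger, which yields the identical logger as passing the Class itself, so the explicit name is redundant but harmless. A sketch of the two equivalent lookups, using a hypothetical Monitor class:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Monitor {
  private static final String CLASS_NAME = Monitor.class.getName();

  // Resolved by explicit name...
  private static final Logger BY_NAME = LoggerFactory.getLogger(CLASS_NAME);

  // ...and by Class; both share the fully qualified class name.
  private static final Logger BY_CLASS = LoggerFactory.getLogger(Monitor.class);
}

The same hunk also tightens the completed set to final, an unrelated cleanup folded into the commit.
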
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java
index 51772cd..84603d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java
@@ -21,8 +21,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.spark.executor.TaskMetrics;
 import org.apache.spark.scheduler.SparkListener;
 import org.apache.spark.scheduler.SparkListenerApplicationEnd;
@@ -47,7 +47,7 @@ import com.google.common.collect.Maps;
 
 public class JobMetricsListener implements SparkListener {
 
-  private static final Log LOG = LogFactory.getLog(JobMetricsListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JobMetricsListener.class);
 
   private final Map<Integer, int[]> jobIdToStageId = Maps.newHashMap();
   private final Map<Integer, Integer> stageIdToJobId = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java
index c6f1b8d..ebc5c16 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java
@@ -22,8 +22,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder;
 import org.apache.hadoop.hive.ql.exec.spark.status.SparkJobStatus;
@@ -45,7 +45,7 @@ import com.google.common.collect.Maps;
 public class LocalSparkJobStatus implements SparkJobStatus {
 
   private final JavaSparkContext sparkContext;
-  private static final Log LOG = LogFactory.getLog(LocalSparkJobStatus.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(LocalSparkJobStatus.class.getName());
   private int jobId;
   // After SPARK-2321, we only use JobMetricsListener to get job metrics
   // TODO: remove it when the new API provides equivalent functionality

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index 072bac9..e8d581f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.spark.status.impl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -50,7 +50,7 @@ import java.util.concurrent.TimeUnit;
 * Used with remote spark client.
  */
 public class RemoteSparkJobStatus implements SparkJobStatus {
-  private static final Log LOG = LogFactory.getLog(RemoteSparkJobStatus.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(RemoteSparkJobStatus.class.getName());
   private final SparkClient sparkClient;
   private final JobHandle<Serializable> jobHandle;
   private final transient long sparkClientTimeoutInSeconds;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java
index bf830eb..dfc778a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.tez;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.io.ColumnarSplit;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.mapred.InputSplit;
@@ -30,7 +30,7 @@ import org.apache.hadoop.mapred.split.SplitSizeEstimator;
  * Split size estimator for columnar file formats.
  */
 public class ColumnarSplitSizeEstimator implements SplitSizeEstimator {
-  private static final Log LOG = LogFactory.getLog(ColumnarSplitSizeEstimator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ColumnarSplitSizeEstimator.class);
   private static final boolean isDebugEnabled = LOG.isDebugEnabled();
 
   @Override

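ColumnarSplitSizeEstimator caches isDebugEnabled() in a static final field, which pins the decision at class-load time. With slf4j the guard is usually unnecessary anyway: parameterized messages substitute their {} placeholders only when the level is enabled, so no string concatenation happens on the hot path. A hedged sketch with illustrative names and values:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SplitSizeLogging {
  private static final Logger LOG = LoggerFactory.getLogger(SplitSizeLogging.class);

  void report(long estimated, long raw) {
    // Formatting is deferred until DEBUG is known to be enabled.
    LOG.debug("estimated split size {} for raw size {}", estimated, raw);
  }
}
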
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
index 6c3ba3a..cb3ae62 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java
@@ -24,8 +24,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.tez.dag.api.EdgeManagerPlugin;
 import org.apache.tez.dag.api.EdgeManagerPluginContext;
 import org.apache.tez.runtime.api.events.DataMovementEvent;
@@ -33,7 +33,7 @@ import org.apache.tez.runtime.api.events.InputReadErrorEvent;
 
 public class CustomPartitionEdge extends EdgeManagerPlugin {
 
-  private static final Log LOG = LogFactory.getLog(CustomPartitionEdge.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CustomPartitionEdge.class.getName());
 
   CustomEdgeConfiguration conf = null;
   final EdgeManagerPluginContext context;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
index 5f7b20b..e9f1c98 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
@@ -33,8 +33,8 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 
 import com.google.common.collect.LinkedListMultimap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -105,7 +105,7 @@ public class CustomPartitionVertex extends VertexManagerPlugin {
     }
   }
 
-  private static final Log LOG = LogFactory.getLog(CustomPartitionVertex.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CustomPartitionVertex.class.getName());
 
   VertexManagerPluginContext context;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index 46050e8..6dcfe8d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -39,8 +39,8 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -136,7 +136,7 @@ import org.apache.tez.runtime.library.input.ConcatenatedMergedKeyValueInput;
 public class DagUtils {
 
   public static final String TEZ_TMP_DIR_KEY = "_hive_tez_tmp_dir";
-  private static final Log LOG = LogFactory.getLog(DagUtils.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(DagUtils.class.getName());
   private static final String TEZ_DIR = "_tez_scratch_dir";
   private static DagUtils instance;
   // The merge file being currently processed.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java
index 7abd94d..b67ac8d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java
@@ -38,8 +38,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
 import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -73,7 +73,7 @@ import org.apache.tez.runtime.api.events.InputInitializerEvent;
  */
 public class DynamicPartitionPruner {
 
-  private static final Log LOG = LogFactory.getLog(DynamicPartitionPruner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DynamicPartitionPruner.class);
 
   private final InputInitializerContext context;
   private final MapWork work;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
index 8a3647c..ff79110 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
@@ -24,8 +24,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -57,7 +57,7 @@ import org.apache.tez.runtime.library.api.KeyValueReader;
  */
 public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader {
 
-  private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName());
 
   private Configuration hconf;
   private MapJoinDesc desc;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
index 52c36eb..c10e53d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.tez;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.common.JavaUtils;
@@ -53,7 +53,7 @@ public class HivePreWarmProcessor extends AbstractLogicalIOProcessor {
 
   private static boolean prewarmed = false;
 
-  private static final Log LOG = LogFactory.getLog(HivePreWarmProcessor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HivePreWarmProcessor.class);
 
   private Configuration conf;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
index 4019e7e..2ab3328 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
@@ -24,8 +24,8 @@ import java.util.Comparator;
 import java.util.List;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hive.common.JavaUtils;
@@ -69,7 +69,7 @@ import com.google.common.collect.Multimap;
  */
 public class HiveSplitGenerator extends InputInitializer {
 
-  private static final Log LOG = LogFactory.getLog(HiveSplitGenerator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveSplitGenerator.class);
 
   private final DynamicPartitionPruner pruner;
   private final Configuration conf;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java
index a7936a4..6f77453 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java
@@ -27,8 +27,8 @@ import java.util.concurrent.Future;
 
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 import com.google.common.cache.Cache;
@@ -40,7 +40,7 @@ import com.google.common.cache.CacheBuilder;
  */
 public class LlapObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache {
 
-  private static final Log LOG = LogFactory.getLog(LlapObjectCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(LlapObjectCache.class.getName());
 
   private static ExecutorService staticPool = Executors.newCachedThreadPool();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
index c758000..948829b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
@@ -30,8 +30,8 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.Callable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.io.api.LlapIoProxy;
@@ -71,7 +71,7 @@ import org.apache.tez.runtime.library.api.KeyValueReader;
  * Just pump the records through the query plan.
  */
 public class MapRecordProcessor extends RecordProcessor {
-  public static final Log l4j = LogFactory.getLog(MapRecordProcessor.class);
+  public static final Logger l4j = LoggerFactory.getLogger(MapRecordProcessor.class);
   protected static final String MAP_PLAN_KEY = "__MAP_PLAN__";
 
   private MapOperator mapOp;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java
index a70c2c4..b53c933 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.ql.exec.tez;
 
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.MapOperator;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -37,7 +37,7 @@ import org.apache.tez.runtime.library.api.KeyValueReader;
 
 public class MapRecordSource implements RecordSource {
 
-  public static final Log LOG = LogFactory.getLog(MapRecordSource.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MapRecordSource.class);
   private ExecMapperContext execContext = null;
   private MapOperator mapOp = null;
   private KeyValueReader reader = null;
@@ -90,7 +90,7 @@ public class MapRecordSource implements RecordSource {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        LOG.fatal(StringUtils.stringifyException(e));
+        LOG.error(StringUtils.stringifyException(e));
         closeReader();
         throw new RuntimeException(e);
       }

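The fatal-to-error downgrade here and in the following hunks is forced by the facade: the slf4j Logger interface stops at ERROR and has no fatal() method. slf4j also accepts a Throwable as the final argument and renders its stack trace, which would make the StringUtils.stringifyException detour avoidable; a sketch of the idiomatic form, with a hypothetical handler class:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FailureHandling {
  private static final Logger LOG = LoggerFactory.getLogger(FailureHandling.class);

  void handle(Exception e) {
    // The Throwable overload logs the message plus the full stack trace.
    LOG.error("record processing failed", e);
    throw new RuntimeException(e);
  }
}
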
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java
index f352f8c..bb56e1c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java
@@ -21,8 +21,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
@@ -53,8 +53,8 @@ import com.google.common.collect.Lists;
  */
 public class MergeFileRecordProcessor extends RecordProcessor {
 
-  public static final Log LOG = LogFactory
-      .getLog(MergeFileRecordProcessor.class);
+  public static final Logger LOG = LoggerFactory
+      .getLogger(MergeFileRecordProcessor.class);
 
   protected Operator<? extends OperatorDesc> mergeOp;
   private ExecMapperContext execContext = null;
@@ -218,7 +218,7 @@ public class MergeFileRecordProcessor extends RecordProcessor {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        l4j.fatal(StringUtils.stringifyException(e));
+        l4j.error(StringUtils.stringifyException(e));
         throw new RuntimeException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java
index 64295d4..06dca00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java
@@ -23,8 +23,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.tez.runtime.api.ObjectRegistry;
 
@@ -36,7 +36,7 @@ import com.google.common.base.Preconditions;
  */
 public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache {
 
-  private static final Log LOG = LogFactory.getLog(ObjectCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class.getName());
 
   // ObjectRegistry is available via the Input/Output/ProcessorContext.
   // This is setup as part of the Tez Processor construction, so that it is available whenever an

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
index 6096be5..2f08529 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
@@ -24,15 +24,12 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.Callable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.ObjectCache;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.tez.TezProcessor.TezKVOutputCollector;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
-import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -40,6 +37,8 @@ import org.apache.tez.mapreduce.processor.MRTaskReporter;
 import org.apache.tez.runtime.api.LogicalInput;
 import org.apache.tez.runtime.api.LogicalOutput;
 import org.apache.tez.runtime.api.ProcessorContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -57,7 +56,7 @@ public abstract class RecordProcessor  {
   protected Map<String, OutputCollector> outMap;
   protected final ProcessorContext processorContext;
 
-  public static final Log l4j = LogFactory.getLog(RecordProcessor.class);
+  public static final Logger l4j = LoggerFactory.getLogger(RecordProcessor.class);
 
 
   // used to log memory usage periodically

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
index 7c41cb6..8768847 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
@@ -26,8 +26,8 @@ import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.Callable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.io.api.LlapIoProxy;
 import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
@@ -66,7 +66,7 @@ public class ReduceRecordProcessor  extends RecordProcessor{
 
   private ObjectCache cache;
 
-  public static final Log l4j = LogFactory.getLog(ReduceRecordProcessor.class);
+  public static final Logger l4j = LoggerFactory.getLogger(ReduceRecordProcessor.class);
 
   private ReduceWork reduceWork;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
index 1f2f9f9..41cf953 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
@@ -24,8 +24,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -69,7 +69,7 @@ import org.apache.tez.runtime.library.api.KeyValuesReader;
 @SuppressWarnings("deprecation")
 public class ReduceRecordSource implements RecordSource {
 
-  public static final Log l4j = LogFactory.getLog(ReduceRecordSource.class);
+  public static final Logger l4j = LoggerFactory.getLogger(ReduceRecordSource.class);
 
   private static final String CLASS_NAME = ReduceRecordSource.class.getName();
 
@@ -288,7 +288,7 @@ public class ReduceRecordSource implements RecordSource {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        l4j.fatal(StringUtils.stringifyException(e));
+        l4j.error(StringUtils.stringifyException(e));
         throw new RuntimeException(e);
       }
     }
@@ -394,7 +394,7 @@ public class ReduceRecordSource implements RecordSource {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
-        l4j.fatal(StringUtils.stringifyException(e));
+        l4j.error(StringUtils.stringifyException(e));
         throw new RuntimeException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java
index f95aabf..aaaa6a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java
@@ -30,8 +30,8 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -56,7 +56,7 @@ import com.google.common.collect.Multimap;
  */
 public class SplitGrouper {
 
-  private static final Log LOG = LogFactory.getLog(SplitGrouper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SplitGrouper.class);
 
   // TODO This needs to be looked at. Map of Map to Map... Made concurrent for now since split generation
   // can happen in parallel.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java
index 3eb954d..a3fc815 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.ql.exec.tez;
 
 import java.lang.reflect.Method;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * TezJobExecHelper is a utility to safely call Tez functionality from
@@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class TezJobExecHelper {
 
-  private static final Log LOG = LogFactory.getLog(TezJobExecHelper.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TezJobExecHelper.class.getName());
 
   public static void killRunningJobs() {
     try {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
index d9d8184..23f2487 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -55,7 +55,7 @@ public class TezProcessor extends AbstractLogicalIOProcessor {
     void initializeHook(TezProcessor source);
   }
 
-  private static final Log LOG = LogFactory.getLog(TezProcessor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TezProcessor.class);
   protected boolean isMap = false;
 
   protected RecordProcessor rproc = null;
@@ -195,6 +195,7 @@ public class TezProcessor extends AbstractLogicalIOProcessor {
     }
   }
 
+  @Override
   public void abort() {
     aborted.set(true);
     RecordProcessor rProcLocal;
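
The last hunk adds a missing @Override to abort(). The annotation makes the compiler verify that the method really overrides a supertype method, so a future signature change in the Tez processor contract would fail at compile time instead of leaving a silent dead overload. A minimal sketch with hypothetical Base/Impl classes:

class Base {
  public void abort() { }
}

class Impl extends Base {
  @Override
  public void abort() {
    // Without @Override, a typo such as abort(int flags) would
    // compile as a new overload and never be called through Base.
  }
}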