Posted to commits@hive.apache.org by ha...@apache.org on 2015/10/28 16:45:55 UTC

[07/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
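
The change repeated across every file in this patch is a mechanical swap of the
commons-logging facade for slf4j. A minimal sketch of the before/after shape
(the Example class is hypothetical, not part of the patch):

    // Before: commons-logging
    // import org.apache.commons.logging.Log;
    // import org.apache.commons.logging.LogFactory;
    // private static final Log LOG = LogFactory.getLog(Example.class);

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);

      void demo() {
        // slf4j substitutes {} placeholders lazily, so no string
        // concatenation happens unless INFO is enabled.
        LOG.info("processing {} rows", 42);
      }
    }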

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index b7b6f90..a1e35cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -28,8 +28,8 @@ import java.util.Random;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -51,7 +51,7 @@ public class HiveHistoryImpl implements HiveHistory{
 
   String histFileName; // History file name
 
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.HiveHistoryImpl");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.HiveHistoryImpl");
 
   private static final Random randGen = new Random();
 
@@ -355,7 +355,7 @@ public class HiveHistoryImpl implements HiveHistory{
 
   @Override
   public void closeStream() {
-    IOUtils.cleanup(LOG, histStream);
+    IOUtils.closeStream(histStream);
   }
 
   @Override
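
A note on the closeStream() hunk above: Hadoop's IOUtils.cleanup(Log, Closeable...)
is typed against org.apache.commons.logging.Log, so it no longer accepts the field
once LOG is an slf4j Logger; IOUtils.closeStream(Closeable) closes quietly with no
logger parameter. A minimal sketch (the StreamHolder class is hypothetical):

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;

    class StreamHolder {
      private Closeable histStream;

      void closeStream() {
        // closeStream swallows any IOException rather than logging it,
        // which is the behavioral trade-off of dropping cleanup(LOG, histStream).
        IOUtils.closeStream(histStream);
      }
    }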

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
index 1b357de..616f2d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Listener;
 import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
@@ -38,7 +38,7 @@ public class HiveHistoryViewer implements Listener {
 
   String historyFile;
   String sessionId;
-  private static final Log LOG = LogFactory.getLog(HiveHistoryViewer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveHistoryViewer.class);
 
   // Job Hash Map
   private final HashMap<String, QueryInfo> jobInfoMap = new HashMap<String, QueryInfo>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index 2caa7ae..5610fab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -22,8 +22,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -46,7 +46,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
  */
 public class ATSHook implements ExecuteWithHookContext {
 
-  private static final Log LOG = LogFactory.getLog(ATSHook.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ATSHook.class.getName());
   private static final Object LOCK = new Object();
   private static ExecutorService executor;
   private static TimelineClient timelineClient;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index 9988c79..64220f2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -31,8 +31,8 @@ import java.util.Set;
 import org.apache.commons.collections.SetUtils;
 import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -61,7 +61,7 @@ import com.google.gson.stream.JsonWriter;
  */
 public class LineageLogger implements ExecuteWithHookContext {
 
-  private static final Log LOG = LogFactory.getLog(LineageLogger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LineageLogger.class);
 
   private static final HashSet<String> OPERATION_NAMES = new HashSet<String>();
 
@@ -178,10 +178,10 @@ public class LineageLogger implements ExecuteWithHookContext {
         writer.endObject();
         writer.close();
 
-        // Log the lineage info
+        // Log the lineage info
         String lineage = out.toString();
         if (testMode) {
-          // Log to console
+          // Log to console
           log(lineage);
         } else {
           // In non-test mode, emit to a log file,
@@ -199,7 +199,7 @@ public class LineageLogger implements ExecuteWithHookContext {
   }
 
   /**
-   * Log an error to console if available.
+   * Log an error to console if available.
    */
   private void log(String error) {
     LogHelper console = SessionState.getConsole();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
index b0b4a36..d5d1370 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.io.PrintStream;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -45,7 +45,7 @@ import com.google.common.collect.Lists;
  * in the file just to verify the impact of bloom filter fpp.
  */
 public class PostExecOrcFileDump implements ExecuteWithHookContext {
-  private static final Log LOG = LogFactory.getLog(PostExecOrcFileDump.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(PostExecOrcFileDump.class.getName());
 
   private static final PathFilter hiddenFileFilter = new PathFilter() {
     public boolean accept(Path p) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java
index 60c587f..81bda08 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.hooks;
 
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -34,7 +34,7 @@ import org.apache.tez.common.counters.TezCounters;
  * Post execution hook to print hive tez counters to console error stream.
  */
 public class PostExecTezSummaryPrinter implements ExecuteWithHookContext {
-  private static final Log LOG = LogFactory.getLog(PostExecTezSummaryPrinter.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(PostExecTezSummaryPrinter.class.getName());
 
   @Override
   public void run(HookContext hookContext) throws Exception {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index 298e7f0..515f8b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.hooks;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.DummyPartition;
@@ -35,7 +35,7 @@ import java.io.Serializable;
  */
 public class WriteEntity extends Entity implements Serializable {
 
-  private static final Log LOG = LogFactory.getLog(WriteEntity.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WriteEntity.class);
 
   private boolean isTempURI = false;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
index 835caf1..36bc9cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.index;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Holds index related constants
  */
 public class HiveIndex {
 
-  public static final Log l4j = LogFactory.getLog("HiveIndex");
+  public static final Logger l4j = LoggerFactory.getLogger("HiveIndex");
 
   public static String INDEX_TABLE_CREATETIME = "hive.index.basetbl.dfs.lastModifiedTime";
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
index 6fe200b..33cc5c3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
@@ -25,8 +25,8 @@ import java.util.Map;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,8 +47,8 @@ import org.apache.hadoop.mapred.LineRecordReader.LineReader;
  */
 public class HiveIndexResult {
 
-  public static final Log l4j =
-    LogFactory.getLog(HiveIndexResult.class.getSimpleName());
+  public static final Logger l4j =
+    LoggerFactory.getLogger(HiveIndexResult.class.getSimpleName());
 
   // IndexBucket
   static class IBucket {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
index c62add0..e072ee6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
@@ -27,8 +27,8 @@ import java.util.Map;
 import java.util.Arrays;
 import java.util.HashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,7 +52,7 @@ import org.apache.hadoop.mapred.JobConf;
  * Uses a blockfilter file to specify the blocks to query.
  */
 public class HiveIndexedInputFormat extends HiveInputFormat {
-  public static final Log l4j = LogFactory.getLog("HiveIndexInputFormat");
+  public static final Logger l4j = LoggerFactory.getLogger("HiveIndexInputFormat");
   private final String indexFile;
 
   public HiveIndexedInputFormat() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
index cb191ac..5ddbd0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -64,7 +64,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
 public class BitmapIndexHandler extends TableBasedIndexHandler {
 
   private Configuration configuration;
-  private static final Log LOG = LogFactory.getLog(BitmapIndexHandler.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(BitmapIndexHandler.class.getName());
 
   @Override
   public void generateIndexQuery(List<Index> indexes, ExprNodeDesc predicate,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
index 586e16d..1d9e131 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
@@ -25,8 +25,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -73,7 +73,7 @@ public class CompactIndexHandler extends TableBasedIndexHandler {
   private Set<String> partitionCols;
   // Whether or not the conditions have been met to use the fact the index is sorted
   private boolean useSorted;
-  private static final Log LOG = LogFactory.getLog(CompactIndexHandler.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CompactIndexHandler.class.getName());
 
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
index 7cebe68..b71084d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.hive.ql.index.compact;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.index.HiveIndexedInputFormat;
 
 public class HiveCompactIndexInputFormat extends HiveIndexedInputFormat {
 
-  public static final Log l4j =
-    LogFactory.getLog(HiveCompactIndexInputFormat.class.getSimpleName());
+  public static final Logger l4j =
+    LoggerFactory.getLogger(HiveCompactIndexInputFormat.class.getSimpleName());
 
   public HiveCompactIndexInputFormat() {
     super("hive.index.compact.file");

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index e8d070c..24137e2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.io;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -91,7 +91,7 @@ public class AcidUtils {
   private AcidUtils() {
     // NOT USED
   }
-  private static final Log LOG = LogFactory.getLog(AcidUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AcidUtils.class);
 
   private static final Pattern ORIGINAL_PATTERN =
       Pattern.compile("[0-9]+_[0-9]+");

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
index edcc3b6..a9c1614 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -50,8 +50,8 @@ import org.apache.hadoop.mapred.Reporter;
 public class BucketizedHiveInputFormat<K extends WritableComparable, V extends Writable>
     extends HiveInputFormat<K, V> {
 
-  public static final Log LOG = LogFactory
-      .getLog("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat");
+  public static final Logger LOG = LoggerFactory
+      .getLogger("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat");
 
   @Override
   public RecordReader getRecordReader(InputSplit split, JobConf job,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
index 9dfb6bf..56b3f84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
@@ -22,8 +22,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
@@ -33,7 +33,7 @@ import org.apache.hadoop.io.compress.Decompressor;
  * native) compression/decompression codecs.
  */
 public final class CodecPool {
-  private static final Log LOG = LogFactory.getLog(CodecPool.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class);
 
   /**
    * A global compressor pool used to save the expensive

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index 53bc1fa..323ac43 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -33,8 +33,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -72,7 +72,7 @@ public class CombineHiveInputFormat<K extends WritableComparable, V extends Writ
     extends HiveInputFormat<K, V> {
 
   private static final String CLASS_NAME = CombineHiveInputFormat.class.getName();
-  public static final Log LOG = LogFactory.getLog(CLASS_NAME);
+  public static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   // max number of threads we can use to check non-combinable paths
   private static final int MAX_CHECK_NONCOMBINABLE_THREAD_NUM = 50;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
index 0095d31..38c99fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.io;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 
 public class HdfsUtils {
   private static final HadoopShims SHIMS = ShimLoader.getHadoopShims();
-  private static final Log LOG = LogFactory.getLog(HdfsUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HdfsUtils.class);
 
   public static long getFileId(FileSystem fileSystem, Path path) throws IOException {
     String pathStr = path.toUri().getPath();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
index 738ca9c..7d36e42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
@@ -23,8 +23,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
@@ -56,7 +56,7 @@ import org.apache.hadoop.mapred.RecordReader;
   */
 public abstract class HiveContextAwareRecordReader<K, V> implements RecordReader<K, V> {
 
-  private static final Log LOG = LogFactory.getLog(HiveContextAwareRecordReader.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HiveContextAwareRecordReader.class.getName());
 
   private boolean initDone = false;
   private long rangeStart;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
index 06d3df7..0328a23 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
@@ -33,8 +33,8 @@ import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -77,7 +77,7 @@ import org.apache.hive.common.util.ReflectionUtil;
  *
  */
 public final class HiveFileFormatUtils {
-  private static final Log LOG = LogFactory.getLog(HiveFileFormatUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveFileFormatUtils.class);
 
   static {
     outputFormatSubstituteMap =

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 45ee9c5..29c4b61 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -30,8 +30,8 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -77,7 +77,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     implements InputFormat<K, V>, JobConfigurable {
 
   private static final String CLASS_NAME = HiveInputFormat.class.getName();
-  private static final Log LOG = LogFactory.getLog(CLASS_NAME);
+  private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   /**
    * A cache of InputFormat instances.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java b/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java
index 14656a7..e857cf9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.io;
 
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
  */
 public class IOContextMap {
   public static final String DEFAULT_CONTEXT = "";
-  private static final Log LOG = LogFactory.getLog(IOContextMap.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IOContextMap.class);
 
   /** Used for Tez and MR */
   private static final ConcurrentHashMap<String, IOContext> globalMap =

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
index fd60fed..9638f2a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
@@ -22,8 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
@@ -39,7 +39,7 @@ public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritab
     JobConfigurable {
 
   static final int MAX_ROW = 100; // to prevent infinite loop
-  static final Log LOG = LogFactory.getLog(NullRowsRecordReader.class.getName());
+  static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName());
 
   public static class DummyInputSplit implements InputSplit {
     public DummyInputSplit() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
index 2a27676..d391164 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
@@ -29,8 +29,8 @@ import java.security.MessageDigest;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -340,7 +340,7 @@ import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*;
  */
 public class RCFile {
 
-  private static final Log LOG = LogFactory.getLog(RCFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RCFile.class);
 
   // internal variable
   public static final String COLUMN_NUMBER_METADATA_STR = "hive.io.rcfile.column.number";

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
index e0bf153..12d9d9a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
@@ -24,13 +24,13 @@ import java.util.ServiceLoader;
 
 import javax.annotation.Nullable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableMap;
 
 public class StorageFormatFactory {
-  private static final Log LOG = LogFactory.getLog(StorageFormatFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StorageFormatFactory.class);
 
   private final Map<String, StorageFormatDescriptor> storageFormats;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
index 2784185..59d3bba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
@@ -30,8 +30,8 @@ import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
@@ -52,7 +52,7 @@ import org.apache.hadoop.util.Progressable;
 public class AvroContainerOutputFormat
         implements HiveOutputFormat<WritableComparable, AvroGenericRecordWritable> {
 
-  public static final Log LOG = LogFactory.getLog(AvroContainerOutputFormat.class);
+  public static final Logger LOG = LoggerFactory.getLogger(AvroContainerOutputFormat.class);
 
   @Override
   public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
index 89fac3f..30862c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
@@ -29,8 +29,8 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.mapred.FsInput;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -52,7 +52,7 @@ import org.apache.hadoop.mapred.Reporter;
  */
 public class AvroGenericRecordReader implements
         RecordReader<NullWritable, AvroGenericRecordWritable>, JobConfigurable {
-  private static final Log LOG = LogFactory.getLog(AvroGenericRecordReader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AvroGenericRecordReader.class);
 
   final private org.apache.avro.file.FileReader<GenericRecord> reader;
   final private long start;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java
index 4c5eed2..1816fac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.io.merge;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -40,7 +40,7 @@ import java.io.IOException;
  * name is used for serialization and deserialization of MergeFileWork.
  */
 public class MergeFileMapper extends MapReduceBase implements Mapper {
-  public static final Log LOG = LogFactory.getLog("MergeFileMapper");
+  public static final Logger LOG = LoggerFactory.getLogger("MergeFileMapper");
   private static final String PLAN_KEY = "__MAP_PLAN__";
 
   private JobConf jc;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
index bd50b46..2f09014 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
@@ -189,7 +189,7 @@ public class MergeFileTask extends Task<MergeFileWork> implements Serializable,
         }
       } catch (Exception e) {
 	// jobClose needs to execute successfully otherwise fail task
-	LOG.warn(e);
+	LOG.warn("Job close failed", e);
         if (success) {
           success = false;
           returnVal = 3;
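
The hunk above shows the other recurring change: slf4j's Logger has no
warn(Object) overload, so the old commons-logging call LOG.warn(e) becomes a
message string plus a trailing Throwable, which also keeps the stack trace in
the output. A minimal sketch (the CloseDemo class is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CloseDemo {
      private static final Logger LOG = LoggerFactory.getLogger(CloseDemo.class);

      void close(AutoCloseable c) {
        try {
          c.close();
        } catch (Exception e) {
          // commons-logging accepted LOG.warn(e); with slf4j, pass a message
          // and the Throwable so the stack trace is logged.
          LOG.warn("Job close failed", e);
        }
      }
    }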

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
index fbc87e8..3339c8d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.io.merge;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -46,7 +46,7 @@ import java.util.List;
 @Explain(displayName = "Merge File Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class MergeFileWork extends MapWork {
 
-  private static final Log LOG = LogFactory.getLog(MergeFileWork.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MergeFileWork.class);
   private List<Path> inputPaths;
   private Path outputDir;
   private boolean hasDynamicPartitions;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
index 3dde0c4..2275188 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.io.DiskRange;
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl.BufferChunk;
 
@@ -34,7 +34,7 @@ import com.google.protobuf.CodedInputStream;
 
 public abstract class InStream extends InputStream {
 
-  private static final Log LOG = LogFactory.getLog(InStream.class);
+  private static final Logger LOG = LoggerFactory.getLogger(InStream.class);
   private static final int PROTOBUF_MESSAGE_MAX_LIMIT = 1024 << 20; // 1GB
 
   protected final Long fileId;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
index 0347a1c..4d5f735 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.io.orc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
@@ -43,7 +43,7 @@ import java.util.concurrent.locks.ReentrantLock;
  */
 class MemoryManager {
 
-  private static final Log LOG = LogFactory.getLog(MemoryManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MemoryManager.class);
 
   /**
    * How often should we check the memory sizes? Measured in rows added

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 7c927dc..73037ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -38,8 +38,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
@@ -123,7 +123,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
     ETL
   }
 
-  private static final Log LOG = LogFactory.getLog(OrcInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcInputFormat.class);
   private static boolean isDebugEnabled = LOG.isDebugEnabled();
   static final HadoopShims SHIMS = ShimLoader.getHadoopShims();
   static final String MIN_SPLIT_SIZE =
@@ -466,7 +466,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
     private final boolean cacheStripeDetails;
     private final AtomicInteger cacheHitCounter = new AtomicInteger(0);
     private final AtomicInteger numFilesCounter = new AtomicInteger(0);
-    private ValidTxnList transactionList;
+    private final ValidTxnList transactionList;
     private SplitStrategyKind splitStrategyKind;
     private final SearchArgument sarg;
 
@@ -851,7 +851,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
     private final boolean hasBase;
     private OrcFile.WriterVersion writerVersion;
     private long projColsUncompressedSize;
-    private List<OrcSplit> deltaSplits;
+    private final List<OrcSplit> deltaSplits;
 
     public SplitGenerator(SplitInfo splitInfo) throws IOException {
       this.context = splitInfo.context;
@@ -1140,7 +1140,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
             context, adi.fs, adi.splitPath, adi.acidInfo, adi.baseOrOriginalFiles);
 
         if (isDebugEnabled) {
-          LOG.debug(splitStrategy);
+          LOG.debug("Split strategy: {}", splitStrategy);
         }
 
         // Hack note - different split strategies return differently typed lists, yay Java.
@@ -1218,10 +1218,10 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
     private final Long fileId;
     private final List<StripeInformation> stripeInfos;
     private FileMetaInfo fileMetaInfo;
-    private List<StripeStatistics> stripeStats;
-    private List<OrcProto.ColumnStatistics> fileStats;
-    private List<OrcProto.Type> types;
-    private OrcFile.WriterVersion writerVersion;
+    private final List<StripeStatistics> stripeStats;
+    private final List<OrcProto.ColumnStatistics> fileStats;
+    private final List<OrcProto.Type> types;
+    private final OrcFile.WriterVersion writerVersion;
 
 
     FileInfo(long modificationTime, long size, List<StripeInformation> stripeInfos,
@@ -1296,6 +1296,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
     private final RecordIdentifier id;
     private final RowReader<OrcStruct> inner;
 
+    @Override
     public RecordIdentifier getRecordIdentifier() {
       return id;
     }
@@ -1606,7 +1607,7 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
 
   /** Local footer cache using Guava. Stores convoluted Java objects. */
   private static class LocalCache implements FooterCache {
-    private Cache<Path, FileInfo> cache;
+    private final Cache<Path, FileInfo> cache;
 
     public LocalCache(int numThreads, int cacheStripeDetailsSize) {
       cache = CacheBuilder.newBuilder()
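
One caution visible in the OrcInputFormat hunks: slf4j only substitutes
arguments where the format string contains a {} placeholder, so the debug call
above needs one for splitStrategy to show up in the output. A sketch of the
parameterized style (the SplitDemo class is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class SplitDemo {
      private static final Logger LOG = LoggerFactory.getLogger(SplitDemo.class);

      void report(Object splitStrategy) {
        // {} defers splitStrategy.toString() until DEBUG is enabled, so the
        // explicit isDebugEnabled() guard is only worthwhile when computing
        // the argument itself is expensive.
        LOG.debug("Split strategy: {}", splitStrategy);
      }
    }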

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
index 1833d3d..c15b35f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -38,7 +38,7 @@ import org.apache.hadoop.mapreduce.lib.input.FileSplit;
  * value is the OrcStruct object */
 public class OrcNewInputFormat extends InputFormat<NullWritable, OrcStruct>{
 
-  private static final Log LOG = LogFactory.getLog(OrcNewInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcNewInputFormat.class);
 
   @Override
   public RecordReader<NullWritable, OrcStruct> createRecordReader(

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
index ad24c58..8a5de7f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
@@ -66,7 +66,7 @@ import org.apache.hadoop.util.Progressable;
 public class OrcOutputFormat extends FileOutputFormat<NullWritable, OrcSerdeRow>
                         implements AcidOutputFormat<NullWritable, OrcSerdeRow> {
 
-  private static final Log LOG = LogFactory.getLog(OrcOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcOutputFormat.class);
 
   static TypeDescription convertTypeInfo(TypeInfo info) {
     switch (info.getCategory()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index fb5110d..ebe1afd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,7 +52,7 @@ import java.util.TreeMap;
  */
 public class OrcRawRecordMerger implements AcidInputFormat.RawReader<OrcStruct>{
 
-  private static final Log LOG = LogFactory.getLog(OrcRawRecordMerger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcRawRecordMerger.class);
 
   private final Configuration conf;
   private final boolean collapse;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index 01374a7..67c5a11 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -25,8 +25,8 @@ import java.nio.charset.CharsetDecoder;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +51,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class OrcRecordUpdater implements RecordUpdater {
 
-  private static final Log LOG = LogFactory.getLog(OrcRecordUpdater.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcRecordUpdater.class);
 
   public static final String ACID_KEY_INDEX_NAME = "hive.acid.key.index";
   public static final String ACID_FORMAT = "_orc_acid_version";

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
index 595f3b3..c0e9b1a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
@@ -46,7 +46,7 @@ import org.apache.hadoop.io.Writable;
 @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES, OrcSerde.COMPRESSION})
 public class OrcSerde implements SerDe, VectorizedSerde {
 
-  private static final Log LOG = LogFactory.getLog(OrcSerde.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcSerde.class);
 
   private final OrcSerdeRow row = new OrcSerdeRow();
   private ObjectInspector inspector = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
index 33513bc..81afb48 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
@@ -25,8 +25,8 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.ColumnarSplit;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
@@ -42,7 +42,7 @@ import org.apache.hadoop.mapred.FileSplit;
  *
  */
 public class OrcSplit extends FileSplit implements ColumnarSplit {
-  private static final Log LOG = LogFactory.getLog(OrcSplit.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcSplit.class);
 
   private FileMetaInfo fileMetaInfo;
   private boolean hasFooter;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java
index 3e2af23..b654b64 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hive.ql.io.orc;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class OrcUtils {
-  private static final Log LOG = LogFactory.getLog(OrcUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcUtils.class);
 
   /**
    * Returns selected columns as a boolean array with true value set for specified column names.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
index 15c4417..f6dea25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
@@ -26,8 +26,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +49,7 @@ import com.google.protobuf.CodedInputStream;
 
 public class ReaderImpl implements Reader {
 
-  private static final Log LOG = LogFactory.getLog(ReaderImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ReaderImpl.class);
 
   private static final int DIRECTORY_SIZE_GUESS = 16 * 1024;
 
@@ -278,7 +278,7 @@ public class ReaderImpl implements Reader {
    * @param path the data source path for error messages
    * @param version the version of hive that wrote the file.
    */
-  static void checkOrcVersion(Log log, Path path, List<Integer> version) {
+  static void checkOrcVersion(Logger log, Path path, List<Integer> version) {
     if (version.size() >= 1) {
       int major = version.get(0);
       int minor = 0;
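
Note that this hunk changes a method signature, not just a field: any helper that accepts the logger as a parameter has to move from Log to Logger in the same commit, or its callers stop compiling. A hedged sketch of the same situation, with hypothetical names rather than the real checkOrcVersion:

    import java.util.List;
    import org.slf4j.Logger;

    class VersionCheckExample {
      // The parameter type must track the facade; passing an slf4j Logger
      // into a method declared with commons-logging's Log would not compile.
      static void checkVersion(Logger log, String path, List<Integer> version) {
        if (!version.isEmpty() && version.get(0) > 0) {
          log.warn("file {} was written by a newer version: {}", path, version);
        }
      }
    }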

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java
index 23a9af4..5e7d636 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -46,7 +46,7 @@ import com.google.common.collect.Lists;
  * to see if type promotions are possible.
  */
 public class RecordReaderFactory {
-  static final Log LOG = LogFactory.getLog(RecordReaderFactory.class);
+  static final Logger LOG = LoggerFactory.getLogger(RecordReaderFactory.class);
   private static final boolean isLogInfoEnabled = LOG.isInfoEnabled();
 
   public static TreeReaderFactory.TreeReader createTreeReader(int colId,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index 33c914e..0696277 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -29,8 +29,8 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.Text;
 
 public class RecordReaderImpl implements RecordReader {
-  static final Log LOG = LogFactory.getLog(RecordReaderImpl.class);
+  static final Logger LOG = LoggerFactory.getLogger(RecordReaderImpl.class);
   private static final boolean isLogDebugEnabled = LOG.isDebugEnabled();
   private final Path path;
   private final long firstRow;
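
The cached isLogDebugEnabled flag above survives the migration unchanged; slf4j exposes the same isDebugEnabled() check. With parameterized messages the guard is often redundant, though it still pays off when an argument is itself expensive to compute. A sketch, using a hypothetical describeRowGroups() helper:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardExample {
      private static final Logger LOG = LoggerFactory.getLogger(GuardExample.class);

      void read(long rowCount, long firstRow) {
        // Placeholder formatting is lazy, so this call needs no guard:
        LOG.debug("read {} rows starting at {}", rowCount, firstRow);

        // A guard still helps when building the argument is the costly part:
        if (LOG.isDebugEnabled()) {
          LOG.debug("row group detail: {}", describeRowGroups());
        }
      }

      private String describeRowGroups() { // hypothetical, stands in for real work
        return "...";
      }
    }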

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java
index 8318a6a..aef3231 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java
@@ -21,8 +21,8 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.EncodingType;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.EncodingType;
  * compression techniques.
  */
 public class RunLengthIntegerReaderV2 implements IntegerReader {
-  public static final Log LOG = LogFactory.getLog(RunLengthIntegerReaderV2.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RunLengthIntegerReaderV2.class);
 
   private InStream input;
   private final boolean signed;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 5bcd8f4..5a82d20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -32,8 +32,8 @@ import java.util.Map;
 import java.util.TimeZone;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -102,7 +102,7 @@ import com.google.protobuf.CodedOutputStream;
  */
 public class WriterImpl implements Writer, MemoryManager.Callback {
 
-  private static final Log LOG = LogFactory.getLog(WriterImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WriterImpl.class);
   static final HadoopShims SHIMS = ShimLoader.getHadoopShims();
 
   private static final int HDFS_BUFFER_SIZE = 256 * 1024;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
index 0d3b64c..e0c0743 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
@@ -22,8 +22,8 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.Pool;
 import org.apache.hadoop.hive.common.Pool.PoolObjectHelper;
 import org.apache.hadoop.hive.common.io.DataCache;
@@ -83,7 +83,7 @@ import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory;
  *    not use it; thus, at the end we go thru all the MBs, and release those not released by (5).
  */
 class EncodedReaderImpl implements EncodedReader {
-  public static final Log LOG = LogFactory.getLog(EncodedReaderImpl.class);
+  public static final Logger LOG = LoggerFactory.getLogger(EncodedReaderImpl.class);
   private static final Object POOLS_CREATION_LOCK = new Object();
   private static Pools POOLS;
   private static class Pools {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java
index 3e00612..f95ebcd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java
@@ -13,8 +13,8 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf.Operator;
@@ -35,7 +35,7 @@ import static org.apache.parquet.filter2.predicate.FilterApi.floatColumn;
 import static org.apache.parquet.filter2.predicate.FilterApi.intColumn;
 
 public class LeafFilterFactory {
-  private static final Log LOG = LogFactory.getLog(LeafFilterFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LeafFilterFactory.class);
 
   class IntFilterPredicateLeafBuilder extends FilterPredicateLeafBuilder {
     /**

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
index d82e93c..a4e35cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
@@ -14,8 +14,8 @@
 package org.apache.hadoop.hive.ql.io.parquet;
 
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport;
 import org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper;
@@ -36,7 +36,7 @@ import org.apache.parquet.hadoop.ParquetInputFormat;
  */
 public class MapredParquetInputFormat extends FileInputFormat<NullWritable, ArrayWritable> {
 
-  private static final Log LOG = LogFactory.getLog(MapredParquetInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapredParquetInputFormat.class);
 
   private final ParquetInputFormat<ArrayWritable> realInput;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index 5e71df9..bfb48a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -19,8 +19,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -50,7 +50,7 @@ import org.apache.parquet.hadoop.ParquetOutputFormat;
 public class MapredParquetOutputFormat extends FileOutputFormat<NullWritable, ParquetHiveRecord>
     implements HiveOutputFormat<NullWritable, ParquetHiveRecord> {
 
-  private static final Log LOG = LogFactory.getLog(MapredParquetOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapredParquetOutputFormat.class);
 
   protected ParquetOutputFormat<ParquetHiveRecord> realOutputFormat;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
index 4848efd..13390de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
@@ -22,8 +22,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.JobConf;
 
 public class ProjectionPusher {
 
-  private static final Log LOG = LogFactory.getLog(ProjectionPusher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ProjectionPusher.class);
 
   private final Map<String, PartitionDesc> pathToPartitionInfo =
       new LinkedHashMap<String, PartitionDesc>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
index ed99615..b28d870 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
@@ -14,8 +14,8 @@
 package org.apache.hadoop.hive.ql.io.parquet;
 
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssign;
 import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
@@ -41,14 +41,14 @@ import org.apache.parquet.hadoop.ParquetInputFormat;
 public class VectorizedParquetInputFormat extends FileInputFormat<NullWritable, VectorizedRowBatch>
   implements VectorizedInputFormatInterface {
 
-  private static final Log LOG = LogFactory.getLog(VectorizedParquetInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(VectorizedParquetInputFormat.class);
 
   /**
    * Vectorized record reader for vectorized Parquet input format
    */
   private static class VectorizedParquetRecordReader implements
       RecordReader<NullWritable, VectorizedRowBatch> {
-    private static final Log LOG = LogFactory.getLog(VectorizedParquetRecordReader.class);
+    private static final Logger LOG = LoggerFactory.getLogger(VectorizedParquetRecordReader.class);
 
     private final ParquetRecordReaderWrapper internalReader;
       private VectorizedRowBatchCtx rbCtx;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java
index d1864ae..786a260 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.read;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.io.parquet.FilterPredicateLeafBuilder;
 import org.apache.hadoop.hive.ql.io.parquet.LeafFilterFactory;
 import org.apache.hadoop.hive.ql.io.sarg.ExpressionTree;
@@ -34,7 +34,7 @@ import java.util.List;
 import java.util.Set;
 
 public class ParquetFilterPredicateConverter {
-  private static final Log LOG = LogFactory.getLog(ParquetFilterPredicateConverter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetFilterPredicateConverter.class);
 
   /**
    * Translate the search argument to the filter predicate parquet uses. It includes

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
index f689b90..74a1a82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
@@ -17,8 +17,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -55,7 +55,7 @@ import org.apache.parquet.schema.MessageTypeParser;
 import com.google.common.base.Strings;
 
 public class ParquetRecordReaderWrapper  implements RecordReader<NullWritable, ArrayWritable> {
-  public static final Log LOG = LogFactory.getLog(ParquetRecordReaderWrapper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordReaderWrapper.class);
 
   private final long splitLen; // for getPos()
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index 493cd36..69272dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -13,8 +13,8 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.write;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
@@ -59,7 +59,7 @@ import java.util.Map;
  * This class is only used through DataWritableWriteSupport class.
  */
 public class DataWritableWriter {
-  private static final Log LOG = LogFactory.getLog(DataWritableWriter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DataWritableWriter.class);
   protected final RecordConsumer recordConsumer;
   private final GroupType schema;
 
@@ -547,4 +547,4 @@ public class DataWritableWriter {
       recordConsumer.addInteger(DateWritable.dateToDays(vDate));
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
index 9e2a9e1..2f838fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
@@ -16,8 +16,8 @@ package org.apache.hadoop.hive.ql.io.parquet.write;
 import java.io.IOException;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -39,7 +39,7 @@ import org.apache.parquet.hadoop.util.ContextUtil;
 public class ParquetRecordWriterWrapper implements RecordWriter<NullWritable, ParquetHiveRecord>,
   org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter {
 
-  public static final Log LOG = LogFactory.getLog(ParquetRecordWriterWrapper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordWriterWrapper.class);
 
   private final org.apache.hadoop.mapreduce.RecordWriter<NullWritable, ParquetHiveRecord> realWriter;
   private final TaskAttemptContext taskContext;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
index d06f502..8a5360e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -62,7 +62,7 @@ public class PartialScanMapper extends MapReduceBase implements
   private boolean exception = false;
   private Reporter rp = null;
 
-  public final static Log LOG = LogFactory.getLog("PartialScanMapper");
+  private static final Logger LOG = LoggerFactory.getLogger("PartialScanMapper");
 
   public PartialScanMapper() {
   }
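
One detail worth noting in this hunk: the logger keeps its old string key, "PartialScanMapper", rather than switching to the class-based form. slf4j, like commons-logging, keys loggers by name, so getLogger(String) and getLogger(Class) coincide only when the string equals the fully qualified class name; keeping the bare name preserves whatever logging configuration was keyed on it. A minimal sketch of the distinction:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerNameExample {
      public static void main(String[] args) {
        Logger byName = LoggerFactory.getLogger("LoggerNameExample");
        Logger byClass = LoggerFactory.getLogger(LoggerNameExample.class);
        // byClass is keyed by the fully qualified name, so the two differ
        // unless the class sits in the default package.
        System.out.println(byName.getName() + " vs " + byClass.getName());
      }
    }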

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 8bebd0f..fd04fb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -24,8 +24,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -253,7 +252,7 @@ public class PartialScanTask extends Task<PartialScanWork> implements
           jobID = rj.getID().toString();
         }
       } catch (Exception e) {
-	LOG.warn(e);
+	LOG.warn("Failed in cleaning up ", e);
       } finally {
 	HadoopJobExecHelper.runningJobs.remove(rj);
       }
@@ -333,7 +332,7 @@ public class PartialScanTask extends Task<PartialScanWork> implements
     }
     HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class);
 
-    Log LOG = LogFactory.getLog(PartialScanTask.class.getName());
+    org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
     boolean isSilent = HiveConf.getBoolVar(conf,
         HiveConf.ConfVars.HIVESESSIONSILENT);
     LogHelper console = new LogHelper(LOG, isSilent);
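
The LOG.warn(e) rewrite in the first hunk is the one place in this file where the facade change is not mechanical: commons-logging's Log.warn(Object) accepted a bare Throwable, while slf4j has no single-argument Throwable overload, so a message string must be supplied to keep the stack trace attached. (The fully qualified org.slf4j.Logger in the second hunk simply follows from the import block keeping only LoggerFactory.) A hedged sketch of the overload difference, with a simulated exception:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class WarnOverloadExample {
      private static final Logger LOG = LoggerFactory.getLogger(WarnOverloadExample.class);

      void cleanUp() {
        try {
          throw new IllegalStateException("simulated cleanup failure");
        } catch (Exception e) {
          // commons-logging allowed LOG.warn(e); slf4j requires a message.
          // The Throwable must be the last argument for the trace to print:
          LOG.warn("Failed in cleaning up ", e);
        }
      }
    }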

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
index 6d8694b..34a18cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.io.rcfile.truncate;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -68,7 +68,7 @@ public class ColumnTruncateMapper extends MapReduceBase implements
   Path dpPath;
   ColumnTruncateWork work;
 
-  public final static Log LOG = LogFactory.getLog(ColumnTruncateMapper.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ColumnTruncateMapper.class.getName());
 
   public ColumnTruncateMapper() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index 149ad93..79b3cfa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
@@ -121,7 +120,7 @@ public class ColumnTruncateTask extends Task<ColumnTruncateWork> implements Seri
     LOG.info("Using " + inpFormat);
 
     try {
-      job.setInputFormat((Class<? extends InputFormat>) JavaUtils.loadClass(inpFormat));
+      job.setInputFormat(JavaUtils.loadClass(inpFormat));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
@@ -218,7 +217,7 @@ public class ColumnTruncateTask extends Task<ColumnTruncateWork> implements Seri
         ColumnTruncateMapper.jobClose(outputPath, success, job, console,
           work.getDynPartCtx(), null);
       } catch (Exception e) {
-	LOG.warn(e);
+	LOG.warn("Failed while cleaning up ", e);
       } finally {
 	HadoopJobExecHelper.runningJobs.remove(rj);
       }
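
The dropped cast in the first hunk compiles because JavaUtils.loadClass is generic (an assumption about its exact signature), so the compiler infers the class type that setInputFormat expects and the caller-side cast had become redundant. A self-contained sketch of the inference pattern with a hypothetical loader:

    class LoadClassExample {
      @SuppressWarnings("unchecked")
      static <T> Class<T> loadClass(String className) throws ClassNotFoundException {
        // Assumed to mirror JavaUtils.loadClass: the one unchecked cast lives
        // here, and the caller's target type drives the inference of T.
        return (Class<T>) Class.forName(className);
      }

      public static void main(String[] args) throws ClassNotFoundException {
        // No caller-side cast needed: T is inferred from the assignment
        // target, just as setInputFormat's parameter type drives it above.
        Class<? extends Runnable> c = loadClass("java.lang.Thread");
        System.out.println(c.getName());
      }
    }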

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
index 0fff9aa..7e888bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
@@ -23,8 +23,8 @@ import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -56,7 +56,7 @@ import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 
 public class ConvertAstToSearchArg {
-  private static final Log LOG = LogFactory.getLog(ConvertAstToSearchArg.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConvertAstToSearchArg.class);
   private final SearchArgument.Builder builder =
       SearchArgumentFactory.newBuilder();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
index bb9da9d..42616ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.lockmgr;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
@@ -42,7 +42,7 @@ import java.util.Set;
 public class DbLockManager implements HiveLockManager{
 
   static final private String CLASS_NAME = DbLockManager.class.getName();
-  static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+  static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   private static final long MAX_SLEEP = 15000;
   private HiveLockManagerCtx context;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
index 219a54a..97d2282 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.lockmgr;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -47,7 +47,7 @@ import java.util.List;
 public class DbTxnManager extends HiveTxnManagerImpl {
 
   static final private String CLASS_NAME = DbTxnManager.class.getName();
-  static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+  static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   private DbLockManager lockMgr = null;
   private IMetaStoreClient client = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
index 7acc53f..2d30198 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.lockmgr;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidReadTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -43,8 +43,8 @@ import java.util.*;
  * transactions.  This provides default Hive behavior.
  */
 class DummyTxnManager extends HiveTxnManagerImpl {
-  static final private Log LOG =
-      LogFactory.getLog(DummyTxnManager.class.getName());
+  static final private Logger LOG =
+      LoggerFactory.getLogger(DummyTxnManager.class.getName());
 
   private HiveLockManager lockMgr;