Posted to commits@hive.apache.org by ha...@apache.org on 2015/10/28 16:45:58 UTC

[10/14] hive git commit: HIVE-12237 : Use slf4j as logging facade

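Every hunk below applies the same mechanical change: drop the commons-logging (JCL) Log/LogFactory pair in favor of the slf4j Logger/LoggerFactory facade. A minimal sketch of the pattern follows — StatsPersister is a hypothetical class name and the messages are illustrative, not taken from the patch; it assumes only that slf4j-api is on the classpath. Note that slf4j's Logger has no Throwable-only overloads, which is why calls such as LOG.info(e) in ColumnStatsUpdateTask and LOG.error(e) in FunctionTask gain an explicit message string in the hunks below.

    // Before (commons-logging, removed by this patch):
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(StatsPersister.class);

    // After (slf4j facade):
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class StatsPersister {
      private static final Logger LOG = LoggerFactory.getLogger(StatsPersister.class);

      public int persist() {
        try {
          // ... work that may throw ...
          return 0;
        } catch (Exception e) {
          // slf4j requires a message argument; passing the Throwable as the
          // last parameter still records the full stack trace.
          LOG.info("Failed to persist stats in metastore", e);
          // Parameterized messages defer string building until the level is enabled.
          LOG.debug("Returning error code {}", 1);
          return 1;
        }
      }
    }
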
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
index c465c84..91abb80 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
@@ -26,8 +26,8 @@ import java.sql.SQLTransactionRollbackException;
 import java.sql.Statement;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
 
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.shims.ShimLoader;
  */
 public final class TxnDbUtil {
 
-  static final private Log LOG = LogFactory.getLog(TxnDbUtil.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(TxnDbUtil.class.getName());
   private static final String TXN_MANAGER = "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager";
 
   private static int deadlockCnt = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index ca485fa..5c5e6ff 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -22,8 +22,8 @@ import com.jolbox.bonecp.BoneCPDataSource;
 import org.apache.commons.dbcp.ConnectionFactory;
 import org.apache.commons.dbcp.DriverManagerConnectionFactory;
 import org.apache.commons.dbcp.PoolableConnectionFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.commons.dbcp.PoolingDataSource;
 
 import org.apache.commons.pool.ObjectPool;
@@ -82,7 +82,7 @@ public class TxnHandler {
 
   static final private int ALLOWED_REPEATED_DEADLOCKS = 10;
   static final private int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 100;
-  static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(TxnHandler.class.getName());
 
   static private DataSource connPool;
   static private boolean doRetryOnConnPool = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java
index 00bbad7..2eb8354 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreInitContext;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
index 7e46523..9acf9d7 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
@@ -33,8 +33,8 @@ import java.util.Set;
 
 import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.thrift.TException;
 
 class VerifyingObjectStore extends ObjectStore {
-  private static final Log LOG = LogFactory.getLog(VerifyingObjectStore.class);
+  private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class);
 
   public VerifyingObjectStore() {
     super();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java
index af8f5fc..6cd3a46 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -51,7 +51,7 @@ import java.util.SortedMap;
 import java.util.TreeMap;
 
 public class TestHBaseAggregateStatsCache {
-  private static final Log LOG = LogFactory.getLog(TestHBaseAggregateStatsCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseAggregateStatsCache.class.getName());
 
   @Mock HTableInterface htable;
   private HBaseStore store;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
index b6dfcf3..22582d2 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -73,7 +73,7 @@ import java.util.TreeMap;
  *
  */
 public class TestHBaseStore {
-  private static final Log LOG = LogFactory.getLog(TestHBaseStore.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStore.class.getName());
   static Map<String, String> emptyParameters = new HashMap<String, String>();
   // Table with NUM_PART_KEYS partitioning keys and NUM_PARTITIONS values per key
   static final int NUM_PART_KEYS = 1;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java
index 7ccfdb4..0fe25e6 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.Delete;
@@ -76,7 +76,7 @@ import java.util.TreeMap;
  *
  */
 public class TestHBaseStoreCached {
-  private static final Log LOG = LogFactory.getLog(TestHBaseStoreCached.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreCached.class.getName());
   static Map<String, String> emptyParameters = new HashMap<String, String>();
 
   @Rule public ExpectedException thrown = ExpectedException.none();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java
index fdfb6d1..e0d8ce4 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -37,7 +37,7 @@ import java.util.List;
  *
  */
 public class TestSharedStorageDescriptor {
-  private static final Log LOG = LogFactory.getLog(TestHBaseStore.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStore.class.getName());
 
 
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
index abceaf3..a765f61 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.junit.Test;
 
 public class TestTxnHandlerNegative {
-  static final private Log LOG = LogFactory.getLog(TestTxnHandlerNegative.class);
+  static final private Logger LOG = LoggerFactory.getLogger(TestTxnHandlerNegative.class);
 
   /**
    * this intentionally sets a bad URL for connection to test error handling logic

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3b3303c..282d077 100644
--- a/pom.xml
+++ b/pom.xml
@@ -47,6 +47,7 @@
     <module>ql</module>
     <module>serde</module>
     <module>service</module>
+    <module>llap-server</module>
     <module>llap-client</module>
     <module>shims</module>
     <module>spark-client</module>
@@ -116,7 +117,6 @@
     <commons-io.version>2.4</commons-io.version>
     <commons-lang.version>2.6</commons-lang.version>
     <commons-lang3.version>3.1</commons-lang3.version>
-    <commons-logging.version>1.1.3</commons-logging.version>
     <commons-pool.version>1.5.4</commons-pool.version>
     <commons-dbcp.version>1.4</commons-dbcp.version>
     <derby.version>10.10.2.0</derby.version>
@@ -329,11 +329,6 @@
         <version>${commons-lang.version}</version>
       </dependency>
       <dependency>
-        <groupId>commons-logging</groupId>
-        <artifactId>commons-logging</artifactId>
-        <version>${commons-logging.version}</version>
-      </dependency>
-      <dependency>
         <groupId>io.netty</groupId>
         <artifactId>netty-all</artifactId>
         <version>${netty.version}</version>
@@ -379,11 +374,6 @@
         <version>${log4j2.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.apache.logging.log4j</groupId>
-        <artifactId>log4j-jcl</artifactId>
-        <version>${log4j2.version}</version>
-      </dependency>
-      <dependency>
         <groupId>org.antlr</groupId>
         <artifactId>antlr-runtime</artifactId>
         <version>${antlr.version}</version>
@@ -600,12 +590,22 @@
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-client</artifactId>
         <version>${hadoop.version}</version>
-      </dependency>
+         <exclusions>
+           <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+         </exclusions>
+     </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
         <version>${hadoop.version}</version>
         <exclusions>
+           <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
           <exclusion>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpcore</artifactId>

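For reference, a minimal sketch (not part of the patch) of the dependency shape the pom.xml hunk above aims for: commons-logging is excluded from the Hadoop artifacts so that only the slf4j facade remains on the compile classpath. The ${hadoop.version} property and dependencyManagement placement are assumed from the surrounding pom.

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <groupId>commons-logging</groupId>
          <artifactId>commons-logging</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
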
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 8ac13a6..005c232 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -107,11 +107,6 @@
       <version>${commons-lang.version}</version>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <version>${commons-logging.version}</version>
-    </dependency>
-    <dependency>
       <groupId>javolution</groupId>
       <artifactId>javolution</artifactId>
       <version>${javolution.version}</version>
@@ -127,11 +122,6 @@
       <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-jcl</artifactId>
-      <version>${log4j2.version}</version>
-    </dependency>
-    <dependency>
       <groupId>org.antlr</groupId>
       <artifactId>antlr-runtime</artifactId>
       <version>${antlr.version}</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java b/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java
deleted file mode 100644
index bcdea1d..0000000
--- a/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional debugrmation
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.llap;
-
-import org.apache.commons.logging.Log;
-
-public class LogLevels {
-  private final boolean isT, isD, isI, isW, isE;
-
-  public LogLevels(Log log) {
-    isT = log.isTraceEnabled();
-    isD = log.isDebugEnabled();
-    isI = log.isInfoEnabled();
-    isW = log.isWarnEnabled();
-    isE = log.isErrorEnabled();
-  }
-
-  public boolean isTraceEnabled() {
-    return isT;
-  }
-
-  public boolean isDebugEnabled() {
-    return isD;
-  }
-
-  public boolean isInfoEnabled() {
-    return isI;
-  }
-
-  public boolean isWarnEnabled() {
-    return isW;
-  }
-
-  public boolean isErrorEnabled() {
-    return isE;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 1499a91..affaec8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -31,8 +31,8 @@ import java.util.Random;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.antlr.runtime.TokenRewriteStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
@@ -62,7 +62,7 @@ public class Context {
   private Path resFile;
   private Path resDir;
   private FileSystem resFs;
-  private static final Log LOG = LogFactory.getLog("hive.ql.Context");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.Context");
   private Path[] resDirPaths;
   private int resDirFilesNum;
   boolean initialized;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 44b247f..dfbadf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -35,8 +35,8 @@ import java.util.Set;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -124,7 +124,7 @@ import org.apache.hadoop.mapred.JobConf;
 public class Driver implements CommandProcessor {
 
   static final private String CLASS_NAME = Driver.class.getName();
-  static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+  private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
 
   private int maxRows = 100;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
index c7d3b66..f43992c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
@@ -40,9 +40,9 @@ import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * DriverContext.
@@ -50,7 +50,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
  */
 public class DriverContext {
 
-  private static final Log LOG = LogFactory.getLog(Driver.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(Driver.class.getName());
   private static final SessionState.LogHelper console = new SessionState.LogHelper(LOG);
 
   private static final int SLEEP_TIME = 2000;
@@ -189,6 +189,7 @@ public class DriverContext {
     // extract stats keys from StatsTask
     List<Task<?>> rootTasks = plan.getRootTasks();
     NodeUtils.iterateTask(rootTasks, StatsTask.class, new Function<StatsTask>() {
+      @Override
       public void apply(StatsTask statsTask) {
         statsTasks.put(statsTask.getWork().getAggKey(), statsTask);
       }
@@ -212,6 +213,7 @@ public class DriverContext {
     }
     final List<String> statKeys = new ArrayList<String>(1);
     NodeUtils.iterate(operators, FileSinkOperator.class, new Function<FileSinkOperator>() {
+      @Override
       public void apply(FileSinkOperator fsOp) {
         if (fsOp.getConf().isGatherStats()) {
           statKeys.add(fsOp.getConf().getStatsAggPrefix());

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index b9776ea..9132a21 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -35,8 +35,6 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
@@ -69,7 +67,6 @@ import org.apache.thrift.transport.TMemoryBuffer;
 public class QueryPlan implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private static final Log LOG = LogFactory.getLog(QueryPlan.class.getName());
 
   private String queryString;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java
index f1c32b9..a3ec0e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java
@@ -19,13 +19,9 @@ package org.apache.hadoop.hive.ql.exec;
 
 import java.io.IOException;
 import java.io.Serializable;
-import java.util.Collection;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -34,6 +30,8 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.plan.FileMergeDesc;
 import org.apache.hadoop.mapred.JobConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Fast file merge operator for ORC and RCfile. This is an abstract class which
@@ -44,8 +42,7 @@ public abstract class AbstractFileMergeOperator<T extends FileMergeDesc>
     extends Operator<T> implements Serializable {
 
   public static final String BACKUP_PREFIX = "_backup.";
-  public static final Log LOG = LogFactory
-      .getLog(AbstractFileMergeOperator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(AbstractFileMergeOperator.class);
 
   protected JobConf jc;
   protected FileSystem fs;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
index 54b61a9..be38b9a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
@@ -28,8 +28,8 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -48,7 +48,7 @@ import org.apache.hadoop.hive.shims.HadoopShims;
  */
 @SuppressWarnings("nls")
 public final class ArchiveUtils {
-  private static final Log LOG = LogFactory.getLog(ArchiveUtils.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ArchiveUtils.class.getName());
 
   public static String ARCHIVING_LEVEL = "archiving_level";
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
index a46bf6b..13e5ccc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec;
 import java.util.Timer;
 import java.util.TimerTask;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.Reporter;
 
 /**
@@ -32,7 +32,7 @@ import org.apache.hadoop.mapred.Reporter;
  * indefinitely.
  */
 public class AutoProgressor {
-  protected Log LOG = LogFactory.getLog(this.getClass().getName());
+  private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName());
 
   // Timer that reports every 5 minutes to the jobtracker. This ensures that
   // even if the operator returning rows for greater than that

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index 6636354..f6fbe74 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -24,8 +24,8 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -74,7 +74,7 @@ import org.apache.hadoop.util.StringUtils;
 public class ColumnStatsTask extends Task<ColumnStatsWork> implements Serializable {
   private static final long serialVersionUID = 1L;
   private FetchOperator ftOp;
-  private static transient final Log LOG = LogFactory.getLog(ColumnStatsTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(ColumnStatsTask.class);
 
   public ColumnStatsTask() {
     super();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index a665f85..dcbbe2e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -26,8 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
@@ -64,8 +64,8 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
   private static final long serialVersionUID = 1L;
-  private static transient final Log LOG = LogFactory
-      .getLog(ColumnStatsUpdateTask.class);
+  private static transient final Logger LOG = LoggerFactory
+      .getLogger(ColumnStatsUpdateTask.class);
 
   @Override
   public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
@@ -313,7 +313,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
         return persistPartitionStats();
       }
     } catch (Exception e) {
-      LOG.info(e);
+      LOG.info("Failed to persist stats in metastore", e);
     }
     return 1;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
index bcb9fce..b0170f5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
@@ -27,8 +27,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 public abstract class CommonJoinOperator<T extends JoinDesc> extends
     Operator<T> implements Serializable {
   private static final long serialVersionUID = 1L;
-  protected static final Log LOG = LogFactory.getLog(CommonJoinOperator.class
+  protected static final Logger LOG = LoggerFactory.getLogger(CommonJoinOperator.class
       .getName());
 
   protected transient int numAliases; // number of aliases

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
index 44381b0..d5d62ca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
@@ -29,8 +29,8 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
@@ -69,7 +69,7 @@ public class CommonMergeJoinOperator extends AbstractMapJoinOperator<CommonMerge
 
   private static final long serialVersionUID = 1L;
   private boolean isBigTableWork;
-  private static final Log LOG = LogFactory.getLog(CommonMergeJoinOperator.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CommonMergeJoinOperator.class.getName());
   transient List<Object>[] keyWritables;
   transient List<Object>[] nextKeyWritables;
   transient RowContainer<List<Object>>[] nextGroupStorage;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
index 865613a..cbe0aca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec;
 
 import java.io.Serializable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,7 +40,7 @@ public class CopyTask extends Task<CopyWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
 
-  private static transient final Log LOG = LogFactory.getLog(CopyTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(CopyTask.class);
 
   public CopyTask() {
     super();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 20be624..dcac9ca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec;
 import com.google.common.collect.Iterables;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -223,7 +223,7 @@ import static org.apache.hadoop.util.StringUtils.stringifyException;
  **/
 public class DDLTask extends Task<DDLWork> implements Serializable {
   private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.DDLTask");
 
   private static final int separator = Utilities.tabCode;
   private static final int terminator = Utilities.newLineCode;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java
index fccdc89..cd3cf98 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java
@@ -23,14 +23,14 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.Path;
 
 public class DefaultBucketMatcher implements BucketMatcher {
 
-  protected Log LOG = LogFactory.getLog(this.getClass().getName());
+  protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName());
 
   //MAPPING: bucket_file_name_in_big_table->{alias_table->corresonding_bucket_file_names}
   private Map<String, Map<String, List<String>>> aliasBucketMapping;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
index 41389bd..0888c7b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
@@ -27,8 +27,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.DemuxDesc;
@@ -51,7 +51,7 @@ public class DemuxOperator extends Operator<DemuxDesc>
   implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  protected static final Log LOG = LogFactory.getLog(DemuxOperator.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(DemuxOperator.class.getName());
 
   // Counters for debugging, we cannot use existing counters (cntr and nextCntr)
   // in Operator since we want to individually track the number of rows from

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index a74a8ad..f48db6a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -39,8 +39,8 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
@@ -78,11 +78,11 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
   public static final String EXPL_COLUMN_NAME = "Explain";
   private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
   private boolean isLogical = false;
-  protected final Log LOG;
+  protected final Logger LOG;
 
   public ExplainTask() {
     super();
-    LOG = LogFactory.getLog(this.getClass().getName());
+    LOG = LoggerFactory.getLogger(this.getClass().getName());
   }
 
   /*

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
index b09b706..221abd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
@@ -40,8 +40,8 @@ import java.util.List;
  */
 public class ExprNodeGenericFuncEvaluator extends ExprNodeEvaluator<ExprNodeGenericFuncDesc> {
 
-  private static final Log LOG = LogFactory
-      .getLog(ExprNodeGenericFuncEvaluator.class.getName());
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ExprNodeGenericFuncEvaluator.class.getName());
 
   transient GenericUDF genericUDF;
   transient Object rowObject;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index 26ba320..157115b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@ -29,8 +29,6 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.lang3.StringEscapeUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -60,7 +58,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.InputFormat;
@@ -72,6 +69,8 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.common.util.AnnotationUtils;
 import org.apache.hive.common.util.ReflectionUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Iterators;
 
@@ -80,7 +79,7 @@ import com.google.common.collect.Iterators;
  **/
 public class FetchOperator implements Serializable {
 
-  static final Log LOG = LogFactory.getLog(FetchOperator.class.getName());
+  static final Logger LOG = LoggerFactory.getLogger(FetchOperator.class.getName());
   static final LogHelper console = new LogHelper(LOG);
 
   public static final String FETCH_OPERATOR_DIRECTORY_LIST =

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index 31aa3dc..1634143 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -52,7 +52,7 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
   private ListSinkOperator sink;
   private int totalRows;
 
-  private static transient final Log LOG = LogFactory.getLog(FetchTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(FetchTask.class);
 
   public FetchTask() {
     super();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 9da9499..7459bba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -25,18 +25,13 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.io.StringWriter;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
-import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -81,6 +76,8 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
 
@@ -90,7 +87,7 @@ import com.google.common.collect.Lists;
 public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
     Serializable {
 
-  public static final Log LOG = LogFactory.getLog(FileSinkOperator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FileSinkOperator.class);
   private static final boolean isInfoEnabled = LOG.isInfoEnabled();
   private static final boolean isDebugEnabled = LOG.isDebugEnabled();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 9316600..de8e98c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -32,8 +32,8 @@ import java.util.TreeSet;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -144,7 +144,7 @@ import org.apache.hive.common.util.AnnotationUtils;
  */
 public final class FunctionRegistry {
 
-  private static final Log LOG = LogFactory.getLog(FunctionRegistry.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FunctionRegistry.class);
 
   /*
    * PTF variables

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index 7671d29..ec755a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -25,8 +25,8 @@ import java.util.List;
 
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -53,7 +53,7 @@ import org.apache.hadoop.util.StringUtils;
  */
 public class FunctionTask extends Task<FunctionWork> {
   private static final long serialVersionUID = 1L;
-  private static transient final Log LOG = LogFactory.getLog(FunctionTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(FunctionTask.class);
 
   public FunctionTask() {
     super();
@@ -237,7 +237,7 @@ public class FunctionTask extends Task<FunctionWork> {
       throws HiveException {
     // If this is a non-local warehouse, then adding resources from the local filesystem
     // may mean that other clients will not be able to access the resources.
-    // So disallow resources from local filesystem in this case. 
+    // So disallow resources from local filesystem in this case.
     if (resources != null && resources.size() > 0) {
       try {
         String localFsScheme = FileSystem.getLocal(db.getConf()).getUri().getScheme();
@@ -258,7 +258,7 @@ public class FunctionTask extends Task<FunctionWork> {
       } catch (HiveException e) {
         throw e;
       } catch (Exception e) {
-        LOG.error(e);
+        LOG.error("Exception caught in checkLocalFunctionResources", e);
         throw new HiveException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
index c06fb56..76308f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
@@ -22,12 +22,10 @@ import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
-import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -59,7 +57,7 @@ import org.apache.hadoop.util.ReflectionUtils;
 public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> implements
     Serializable {
   private static final long serialVersionUID = 1L;
-  protected static final Log LOG = LogFactory.getLog(HashTableSinkOperator.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(HashTableSinkOperator.class.getName());
 
   /**
    * The expressions for join inputs's join keys.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java
index 567890a..ff64563 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
@@ -37,7 +37,7 @@ public class Heartbeater {
   private HiveTxnManager txnMgr;
   private Configuration conf;
 
-  static final private Log LOG = LogFactory.getLog(Heartbeater.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(Heartbeater.class.getName());
 
   /**
    *

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java
index 01a67e0..247d08c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java
@@ -20,8 +20,8 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -32,7 +32,7 @@ import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
 
 public class HiveTotalOrderPartitioner implements Partitioner<HiveKey, Object>, Configurable {
 
-  private static final Log LOG = LogFactory.getLog(HiveTotalOrderPartitioner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveTotalOrderPartitioner.class);
 
   private Partitioner<BytesWritable, Object> partitioner;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
index 3b92ab6..3453fc9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
@@ -24,7 +24,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -186,7 +186,7 @@ public class JoinOperator extends CommonJoinOperator<JoinDesc> implements Serial
     super.jobCloseOp(hconf, success);
   }
 
-  private void moveUpFiles(Path specPath, Configuration hconf, Log log)
+  private void moveUpFiles(Path specPath, Configuration hconf, Logger log)
       throws IOException, HiveException {
     FileSystem fs = specPath.getFileSystem(hconf);
 
@@ -211,7 +211,7 @@ public class JoinOperator extends CommonJoinOperator<JoinDesc> implements Serial
    * @throws HiveException
    */
   private void  mvFileToFinalPath(Path specPath, Configuration hconf,
-      boolean success, Log log) throws IOException, HiveException {
+      boolean success, Logger log) throws IOException, HiveException {
 
     FileSystem fs = specPath.getFileSystem(hconf);
     Path tmpPath = Utilities.toTempPath(specPath);

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index b67ead7..95fd1bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -29,8 +29,8 @@ import java.util.concurrent.Future;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -75,7 +75,7 @@ import org.apache.hive.common.util.ReflectionUtil;
 public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(MapJoinOperator.class.getName());
   private static final String CLASS_NAME = MapJoinOperator.class.getName();
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
index 4eca2d8..caf4aa3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
@@ -24,8 +24,8 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.tez.TezContext;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapred.Reporter;
  */
 public class MapredContext {
 
-  private static final Log logger = LogFactory.getLog("MapredContext");
+  private static final Logger logger = LoggerFactory.getLogger("MapredContext");
   private static final ThreadLocal<MapredContext> contexts = new ThreadLocal<MapredContext>();
 
   public static MapredContext get() {

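Note that slf4j, like commons-logging, accepts either a Class or an arbitrary String as the logger name, so string-named call sites such as this one translate one-for-one and keep their existing category. A minimal sketch (the class is hypothetical, not from the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerNamingSketch {                       // illustrative only, not in the patch
      // Class form: the logger name is the fully qualified class name.
      static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNamingSketch.class);
      // String form: keeps the short "MapredContext" category, so any logging
      // configuration keyed on that name continues to match after the migration.
      static final Logger BY_NAME = LoggerFactory.getLogger("MapredContext");
    }
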
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 920bb1c..786e17f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -79,7 +79,7 @@ import java.util.Map;
 public class MoveTask extends Task<MoveWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  private static transient final Log LOG = LogFactory.getLog(MoveTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(MoveTask.class);
 
   public MoveTask() {
     super();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java
index 2760a8d..4f4abd3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java
@@ -25,8 +25,8 @@ import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -72,7 +72,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 public class MuxOperator extends Operator<MuxDesc> implements Serializable{
 
   private static final long serialVersionUID = 1L;
-  protected static final Log LOG = LogFactory.getLog(MuxOperator.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(MuxOperator.class.getName());
 
   /**
    * Handler is used to construct the key-value structure.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java
index 22853bd..3d9771a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec;
 
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.io.api.LlapIoProxy;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.exec.tez.LlapObjectCache;
 public class ObjectCacheFactory {
   private static final ConcurrentHashMap<String, ObjectCache> llapQueryCaches =
       new ConcurrentHashMap<>();
-  private static final Log LOG = LogFactory.getLog(ObjectCacheFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectCacheFactory.class);
 
   private ObjectCacheFactory() {
     // avoid instantiation

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
index 6c7c6aa..b6fec61 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -32,8 +32,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.lib.Node;
@@ -221,8 +221,8 @@ public abstract class Operator<T extends OperatorDesc> implements Serializable,C
   protected transient Map<String, LongWritable> statsMap = new HashMap<String, LongWritable>();
   @SuppressWarnings("rawtypes")
   protected transient OutputCollector out;
-  protected transient final Log LOG = LogFactory.getLog(getClass().getName());
-  protected transient final Log PLOG = LogFactory.getLog(Operator.class.getName()); // for simple disabling logs from all operators
+  protected transient final Logger LOG = LoggerFactory.getLogger(getClass().getName());
+  protected transient final Logger PLOG = LoggerFactory.getLogger(Operator.class.getName()); // for simple disabling logs from all operators
   protected transient final boolean isLogInfoEnabled = LOG.isInfoEnabled() && PLOG.isInfoEnabled();
   protected transient final boolean isLogDebugEnabled = LOG.isDebugEnabled() && PLOG.isDebugEnabled();
   protected transient final boolean isLogTraceEnabled = LOG.isTraceEnabled() && PLOG.isTraceEnabled();

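Operator keeps its isLogInfoEnabled / isLogDebugEnabled / isLogTraceEnabled flags; they still work because the slf4j Logger exposes the same isInfoEnabled(), isDebugEnabled() and isTraceEnabled() probes. With parameterized messages the explicit guard mainly pays off when computing an argument is itself expensive, as in this minimal sketch (the explain() helper is hypothetical, not from the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardSketch {                              // illustrative only, not in the patch
      private static final Logger LOG = LoggerFactory.getLogger(GuardSketch.class);

      void report(long rowCount) {
        // Cheap argument: the {} placeholder defers formatting, no guard needed.
        LOG.debug("forwarded {} rows", rowCount);

        // Expensive argument: keep the guard so explain() only runs when DEBUG is on.
        if (LOG.isDebugEnabled()) {
          LOG.debug("operator state: {}", explain());
        }
      }

      private String explain() {                     // stand-in for costly work
        return "...";
      }
    }
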
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
index ff58741..f619a56 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAppMasterEventOperator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hive.ql.plan.UnionDesc;
  */
 @SuppressWarnings({ "rawtypes", "unchecked" })
 public final class OperatorFactory {
-  protected static transient final Log LOG = LogFactory.getLog(OperatorFactory.class);
+  protected static transient final Logger LOG = LoggerFactory.getLogger(OperatorFactory.class);
   private static final List<OpTuple> opvec;
   private static final List<OpTuple> vectorOpvec;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
index bd10912..67e5c2a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.NodeUtils.Function;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -36,7 +36,7 @@ import com.google.common.collect.Multimap;
 
 public class OperatorUtils {
 
-  private static final Log LOG = LogFactory.getLog(OperatorUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OperatorUtils.class);
 
   public static <T> Set<T> findOperators(Operator<?> start, Class<T> clazz) {
     return findOperators(start, clazz, new HashSet<T>());

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
index 470c4e5..2ea6154 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec;
 import java.io.IOException;
 
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.shims.CombineHiveKey;
  */
 public class OrcFileMergeOperator extends
     AbstractFileMergeOperator<OrcFileMergeDesc> {
-  public final static Log LOG = LogFactory.getLog("OrcFileMergeOperator");
+  public final static Logger LOG = LoggerFactory.getLogger("OrcFileMergeOperator");
 
   // These parameters must match for all orc files involved in merging. If it
   // does not merge, the file will be put into incompatible file set and will

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
index 21d85f1..0d0211f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
@@ -22,8 +22,8 @@ import java.util.ConcurrentModificationException;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
  */
 @SuppressWarnings("deprecation")
 public class PTFPartition {
-  protected static Log LOG = LogFactory.getLog(PTFPartition.class);
+  protected static Logger LOG = LoggerFactory.getLogger(PTFPartition.class);
 
   SerDe serDe;
   StructObjectInspector inputOI;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
index dc1b601..67c4059 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
@@ -27,8 +27,8 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -47,7 +47,7 @@ import org.apache.hadoop.mapred.OutputCollector;
 
 public class PartitionKeySampler implements OutputCollector<HiveKey, Object> {
 
-  private static final Log LOG = LogFactory.getLog(PartitionKeySampler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PartitionKeySampler.class);
 
   public static final Comparator<byte[]> C = new Comparator<byte[]>() {
     public final int compare(byte[] o1, byte[] o2) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java
index 8657688..c34454c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.io.RCFile;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper;
@@ -36,7 +36,7 @@ import java.io.IOException;
  */
 public class RCFileMergeOperator
     extends AbstractFileMergeOperator<RCFileMergeDesc> {
-  public final static Log LOG = LogFactory.getLog("RCFileMergeMapper");
+  public final static Logger LOG = LoggerFactory.getLogger("RCFileMergeMapper");
 
   RCFile.Writer outWriter;
   CompressionCodec codec = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
index a5d59ae..1121819 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Sets;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -61,7 +61,7 @@ import java.util.regex.PatternSyntaxException;
 // Extracted from FunctionRegistry
 public class Registry {
 
-  private static final Log LOG = LogFactory.getLog(FunctionRegistry.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FunctionRegistry.class);
 
   // prefix for window functions, to discern LEAD/LAG UDFs from window functions with the same name
   private static final String WINDOW_FUNC_PREFIX = "@_";

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
index b094fd9..62ae630 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
@@ -28,8 +28,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.ObjectPair;
@@ -61,7 +61,7 @@ public class SMBMapJoinOperator extends AbstractMapJoinOperator<SMBJoinDesc> imp
 
   private static final long serialVersionUID = 1L;
 
-  private static final Log LOG = LogFactory.getLog(SMBMapJoinOperator.class
+  private static final Logger LOG = LoggerFactory.getLogger(SMBMapJoinOperator.class
       .getName());
 
   private MapredLocalWork localWork = null;
@@ -165,7 +165,7 @@ public class SMBMapJoinOperator extends AbstractMapJoinOperator<SMBJoinDesc> imp
   }
 
   public void initializeMapredLocalWork(MapJoinDesc mjConf, Configuration hconf,
-      MapredLocalWork localWork, Log l4j) throws HiveException {
+      MapredLocalWork localWork, Logger l4j) throws HiveException {
     if (localWork == null || localWorkInited) {
       return;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
index 74e6d15..0ff6659 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
@@ -26,8 +26,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -73,7 +73,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  */
 public class SkewJoinHandler {
 
-  protected static final Log LOG = LogFactory.getLog(SkewJoinHandler.class
+  protected static final Logger LOG = LoggerFactory.getLogger(SkewJoinHandler.class
       .getName());
 
   public int currBigKeyTag = -1;
@@ -282,7 +282,7 @@ public class SkewJoinHandler {
     try {
       fs.delete(operatorOutputPath, true);
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error("Failed to delete path ", e);
     }
   }
 

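The last hunk in this file is the one place where the migration is more than a type swap: commons-logging accepts a bare Throwable through error(Object), but the slf4j Logger has no such overload, so a message string is added and the exception is passed as the Throwable argument, which also gets the full stack trace into the log. A minimal sketch of the slf4j signature involved (class and exception are illustrative, not from the patch):

    import java.io.IOException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ErrorLoggingSketch {                       // illustrative only, not in the patch
      private static final Logger LOG = LoggerFactory.getLogger(ErrorLoggingSketch.class);

      void cleanup() {
        try {
          throw new IOException("simulated failure"); // stand-in for fs.delete(...)
        } catch (IOException e) {
          // slf4j form: error(String msg, Throwable t) logs the message plus stack trace.
          LOG.error("Failed to delete path ", e);
        }
      }
    }
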
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
index b2c7d16..7a8de2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java
@@ -26,8 +26,8 @@ import java.util.Set;
 import java.util.concurrent.Future;
 
 import org.apache.commons.io.FileExistsException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -50,7 +50,7 @@ public class SparkHashTableSinkOperator
   private static final long serialVersionUID = 1L;
   private final String CLASS_NAME = this.getClass().getName();
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
-  protected static final Log LOG = LogFactory.getLog(SparkHashTableSinkOperator.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(SparkHashTableSinkOperator.class.getName());
 
   private final HashTableSinkOperator htsOperator;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
index 0d99cbc..bb4bde9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
@@ -27,8 +27,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -70,7 +70,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  private static transient final Log LOG = LogFactory.getLog(StatsNoJobTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(StatsNoJobTask.class);
   private static ConcurrentMap<String, Partition> partUpdates;
   private static Table table;
   private static String tableFullName;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
index f71f55d..c50d5b6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
@@ -25,8 +25,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -62,7 +62,7 @@ import org.apache.hadoop.util.StringUtils;
 public class StatsTask extends Task<StatsWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  private static transient final Log LOG = LogFactory.getLog(StatsTask.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(StatsTask.class);
 
   private Table table;
   private List<LinkedHashMap<String, String>> dpPartSpecs;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 4e66f38..c8e7549 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -27,8 +27,8 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -68,7 +68,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
   protected transient String jobID;
   protected Task<? extends Serializable> backupTask;
   protected List<Task<? extends Serializable>> backupChildrenTasks = new ArrayList<Task<? extends Serializable>>();
-  protected static transient Log LOG = LogFactory.getLog(Task.class);
+  protected static transient Logger LOG = LoggerFactory.getLogger(Task.class);
   protected int taskTag;
   private boolean isLocalMode =false;
   private boolean retryCmdWhenFail = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
index 8859add..46b3510 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
@@ -29,8 +29,8 @@ import java.util.TreeSet;
 
 import com.google.common.collect.MinMaxPriorityQueue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -43,7 +43,7 @@ import org.apache.hadoop.io.WritableComparator;
  * TODO: rename to TopNHeap?
  */
 public class TopNHash {
-  public static Log LOG = LogFactory.getLog(TopNHash.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TopNHash.class);
 
   /**
    * For interaction between operator and top-n hash.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
index e64fa7b..b3c6d91 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 public class UDTFOperator extends Operator<UDTFDesc> implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  protected static final Log LOG = LogFactory.getLog(UDTFOperator.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(UDTFOperator.class.getName());
 
   StructObjectInspector udtfInputOI = null;
   Object[] objToSendToUDTF = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 0eb5f6d..0618077 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -88,8 +88,8 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.WordUtils;
 import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.ContentSummary;
@@ -193,6 +193,7 @@ import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Shell;
 import org.apache.hive.common.util.ReflectionUtil;
+import org.slf4j.Logger;
 
 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
@@ -254,7 +255,7 @@ public final class Utilities {
   private static GlobalWorkMapFactory gWorkMap = new GlobalWorkMapFactory();
 
   private static final String CLASS_NAME = Utilities.class.getName();
-  private static final Log LOG = LogFactory.getLog(CLASS_NAME);
+  private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   public static void clearWork(Configuration conf) {
     Path mapPath = getPlanPath(conf, MAP_PLAN_NAME);
@@ -1881,7 +1882,7 @@ public final class Utilities {
   }
 
   public static void mvFileToFinalPath(Path specPath, Configuration hconf,
-      boolean success, Log log, DynamicPartitionCtx dpCtx, FileSinkDesc conf,
+      boolean success, Logger log, DynamicPartitionCtx dpCtx, FileSinkDesc conf,
       Reporter reporter) throws IOException,
       HiveException {
 
@@ -2579,7 +2580,7 @@ public final class Utilities {
             try {
               new Path(path).getFileSystem(ctx.getConf()).close();
             } catch (IOException ignore) {
-                LOG.debug(ignore);
+                LOG.debug("Failed to close filesystem", ignore);
             }
           }
           if (executor != null) {

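Utilities resolves its logger from the CLASS_NAME string rather than the class literal; with slf4j the two forms are interchangeable, since the class overload simply uses the fully qualified class name. A minimal sketch (the class is hypothetical, not from the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    final class ClassNameLoggerSketch {              // illustrative only, not in the patch
      private static final String CLASS_NAME = ClassNameLoggerSketch.class.getName();

      // Both resolve to the same logger name; typical bindings return the same instance.
      private static final Logger BY_NAME  = LoggerFactory.getLogger(CLASS_NAME);
      private static final Logger BY_CLASS = LoggerFactory.getLogger(ClassNameLoggerSketch.class);
    }
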
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
index d124f09..68123d4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
@@ -30,8 +30,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.http.HtmlQuoting;
@@ -43,7 +43,7 @@ import org.apache.hadoop.mapred.JobConf;
  */
 public class TaskLogProcessor {
 
-  private final Log LOG = LogFactory.getLog(TaskLogProcessor.class);
+  private final Logger LOG = LoggerFactory.getLogger(TaskLogProcessor.class);
   private final Map<ErrorHeuristic, HeuristicStats> heuristics =
     new HashMap<ErrorHeuristic, HeuristicStats>();
   private final List<String> taskLogUrls = new ArrayList<String>();