Posted to commits@hive.apache.org by ha...@apache.org on 2015/10/28 16:45:59 UTC

[11/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
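
The change repeated across the hunks below replaces the commons-logging API (and, in a few registry classes, direct log4j usage) with the slf4j facade, and moves string concatenation to parameterized messages. A minimal before/after sketch of that pattern, with a hypothetical SomeService class standing in for the real ones:

    // Before: commons-logging
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(SomeService.class);
    //   LOG.info("Processing data for " + path);

    // After: slf4j facade with a parameterized message
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class SomeService {
      private static final Logger LOG = LoggerFactory.getLogger(SomeService.class);

      void process(String path) {
        // The {} placeholder defers formatting until INFO is actually enabled.
        LOG.info("Processing data for {}", path);
      }
    }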

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java
index cf3cc78..784c631 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java
@@ -22,8 +22,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hive.llap.daemon.LlapDaemonProtocolBlockingPB;
 public class LlapDaemonProtocolServerImpl extends AbstractService
     implements LlapDaemonProtocolBlockingPB {
 
-  private static final Log LOG = LogFactory.getLog(LlapDaemonProtocolServerImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LlapDaemonProtocolServerImpl.class);
 
   private final int numHandlers;
   private final ContainerRunner containerRunner;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index 5c95086..3b38597 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -29,7 +29,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.common.CallableWithNdc;
 import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler;
 import org.apache.hadoop.hive.llap.daemon.HistoryLogger;
 import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler;
@@ -47,6 +46,7 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.tez.common.CallableWithNdc;
 import org.apache.tez.common.TezCommonUtils;
 import org.apache.tez.common.security.JobTokenIdentifier;
 import org.apache.tez.common.security.TokenCache;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java
index 57aa1e7..621a6a6 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java
@@ -31,11 +31,12 @@ import org.apache.hadoop.hive.llap.daemon.registry.ServiceRegistry;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class LlapFixedRegistryImpl implements ServiceRegistry {
 
-  private static final Logger LOG = Logger.getLogger(LlapFixedRegistryImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LlapFixedRegistryImpl.class);
 
   @InterfaceAudience.Private
   // This is primarily for testing to avoid the host lookup
@@ -219,4 +220,4 @@ public class LlapFixedRegistryImpl implements ServiceRegistry {
   public String toString() {
     return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java
index d3647d0..6550940 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java
@@ -20,11 +20,12 @@ import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import org.apache.hadoop.hive.llap.daemon.registry.ServiceInstanceSet;
 import org.apache.hadoop.hive.llap.daemon.registry.ServiceRegistry;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class LlapRegistryService extends AbstractService {
 
-  private static final Logger LOG = Logger.getLogger(LlapRegistryService.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LlapRegistryService.class);
 
   private ServiceRegistry registry = null;
   private final boolean isDaemon;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java
index cb1b1d0..599da13 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java
@@ -48,7 +48,8 @@ import org.apache.hadoop.registry.client.types.ProtocolTypes;
 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.tez.dag.api.TezUncheckedException;
 import org.apache.zookeeper.CreateMode;
 
@@ -56,7 +57,7 @@ import com.google.common.base.Preconditions;
 
 public class LlapYarnRegistryImpl implements ServiceRegistry {
 
-  private static final Logger LOG = Logger.getLogger(LlapYarnRegistryImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LlapYarnRegistryImpl.class);
 
   private final RegistryOperationsService client;
   private final Configuration conf;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
index 2275719..9520413 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
@@ -6,11 +6,9 @@ import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
-import org.apache.log4j.Logger;
 
 public class LlapWebServices extends AbstractService {
 
-  private static final Logger LOG = Logger.getLogger(LlapWebServices.class);
 
   private int port;
   private boolean ssl;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
index 322235f..51f4c8e 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
@@ -111,7 +111,8 @@ public class LlapInputFormat
 
     private Throwable pendingError = null;
     /** Vector that is currently being processed by our user. */
-    private boolean isDone = false, isClosed = false;
+    private boolean isDone = false;
+    private final boolean isClosed = false;
     private ConsumerFeedback<ColumnVectorBatch> feedback;
     private final QueryFragmentCounters counters;
     private long firstReturnTime;
@@ -261,7 +262,7 @@ public class LlapInputFormat
         LlapIoImpl.LOG.info("close called; closed " + isClosed + ", done " + isDone
             + ", err " + pendingError + ", pending " + pendingData.size());
       }
-      LlapIoImpl.LOG.info(counters); // This is where counters are logged!
+      LlapIoImpl.LOG.info("Llap counters: {}" ,counters); // This is where counters are logged!
       feedback.stop();
       rethrowErrorIfAny();
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
index d79b46a..83a88f5 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
@@ -23,11 +23,10 @@ import java.util.concurrent.Executors;
 
 import javax.management.ObjectName;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.llap.LogLevels;
 import org.apache.hadoop.hive.llap.cache.BuddyAllocator;
 import org.apache.hadoop.hive.llap.cache.Cache;
 import org.apache.hadoop.hive.llap.cache.EvictionAwareAllocator;
@@ -56,22 +55,20 @@ import com.google.common.util.concurrent.MoreExecutors;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 public class LlapIoImpl implements LlapIo<VectorizedRowBatch> {
-  public static final Log LOG = LogFactory.getLog(LlapIoImpl.class);
-  public static final LogLevels LOGL = new LogLevels(LOG);
+  public static final Logger LOG = LoggerFactory.getLogger(LlapIoImpl.class);
 
   private final ColumnVectorProducer cvp;
   private final ListeningExecutorService executor;
-  private LlapDaemonCacheMetrics cacheMetrics;
-  private LlapDaemonQueueMetrics queueMetrics;
+  private final LlapDaemonCacheMetrics cacheMetrics;
+  private final LlapDaemonQueueMetrics queueMetrics;
   private ObjectName buddyAllocatorMXBean;
   private EvictionAwareAllocator allocator;
 
   private LlapIoImpl(Configuration conf) throws IOException {
     boolean useLowLevelCache = HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_LOW_LEVEL_CACHE);
     // High-level cache not supported yet.
-    if (LOGL.isInfoEnabled()) {
-      LOG.info("Initializing LLAP IO" + (useLowLevelCache ? " with low level cache" : ""));
-    }
+    LOG.info("Initializing LLAP IO {}", useLowLevelCache ? " with low level cache" : "");
+
 
     String displayName = "LlapDaemonCacheMetrics-" + MetricsUtils.getHostName();
     String sessionId = conf.get("llap.daemon.metrics.sessionid");
@@ -114,10 +111,7 @@ public class LlapIoImpl implements LlapIo<VectorizedRowBatch> {
     // TODO: this should depends on input format and be in a map, or something.
     this.cvp = new OrcColumnVectorProducer(metadataCache, orcCache, cache, conf, cacheMetrics,
         queueMetrics);
-    if (LOGL.isInfoEnabled()) {
-      LOG.info("LLAP IO initialized");
-    }
-
+    LOG.info("LLAP IO initialized");
     registerMXBeans();
   }
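
The LogLevels wrapper and the isInfoEnabled() guards are dropped in this class because slf4j's parameterized calls only format their arguments when the level is enabled. A hedged sketch of that pattern, with GuardExample and expensiveStateDump() as made-up names:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardExample {
      private static final Logger LOG = LoggerFactory.getLogger(GuardExample.class);

      void init(boolean useLowLevelCache) {
        // No explicit guard: the message is only rendered if INFO is enabled.
        LOG.info("Initializing LLAP IO{}", useLowLevelCache ? " with low level cache" : "");

        // A guard still pays off when computing the argument itself is expensive.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Cache state: {}", expensiveStateDump());
        }
      }

      private String expensiveStateDump() { return "..."; } // hypothetical helper
    }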
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
index 259c483..38c31d3 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
@@ -43,16 +43,15 @@ public class OrcColumnVectorProducer implements ColumnVectorProducer {
   private final Cache<OrcCacheKey> cache;
   private final LowLevelCache lowLevelCache;
   private final Configuration conf;
-  private boolean _skipCorrupt; // TODO: get rid of this
-  private LlapDaemonCacheMetrics cacheMetrics;
-  private LlapDaemonQueueMetrics queueMetrics;
+  private final boolean _skipCorrupt; // TODO: get rid of this
+  private final LlapDaemonCacheMetrics cacheMetrics;
+  private final LlapDaemonQueueMetrics queueMetrics;
 
   public OrcColumnVectorProducer(OrcMetadataCache metadataCache,
       LowLevelCacheImpl lowLevelCache, Cache<OrcCacheKey> cache, Configuration conf,
       LlapDaemonCacheMetrics metrics, LlapDaemonQueueMetrics queueMetrics) {
-    if (LlapIoImpl.LOGL.isInfoEnabled()) {
       LlapIoImpl.LOG.info("Initializing ORC column vector producer");
-    }
+
 
     this.metadataCache = metadataCache;
     this.lowLevelCache = lowLevelCache;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index 86a56ab..e625490 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -8,12 +8,11 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.CallableWithNdc;
 import org.apache.hadoop.hive.common.Pool;
 import org.apache.hadoop.hive.common.Pool.PoolObjectHelper;
 import org.apache.hadoop.hive.common.io.DataCache;
@@ -64,6 +63,7 @@ import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.common.util.FixedSizedObjectPool;
+import org.apache.tez.common.CallableWithNdc;
 
 /**
  * This produces EncodedColumnBatch via ORC EncodedDataImpl.
@@ -73,7 +73,7 @@ import org.apache.hive.common.util.FixedSizedObjectPool;
  */
 public class OrcEncodedDataReader extends CallableWithNdc<Void>
     implements ConsumerFeedback<OrcEncodedColumnBatch>, Consumer<OrcEncodedColumnBatch> {
-  private static final Log LOG = LogFactory.getLog(OrcEncodedDataReader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrcEncodedDataReader.class);
   public static final FixedSizedObjectPool<ColumnStreamData> CSD_POOL =
       new FixedSizedObjectPool<>(8192, new PoolObjectHelper<ColumnStreamData>() {
         @Override
@@ -198,9 +198,8 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
 
   protected Void performDataRead() throws IOException {
     long startTime = counters.startTimeCounter();
-    if (LlapIoImpl.LOGL.isInfoEnabled()) {
-      LlapIoImpl.LOG.info("Processing data for " + split.getPath());
-    }
+    LlapIoImpl.LOG.info("Processing data for {}", split.getPath());
+
     if (processStop()) {
       recordReaderTime(startTime);
       return null;
@@ -745,7 +744,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     long offset = split.getStart(), maxOffset = offset + split.getLength();
     stripeIxFrom = -1;
     int stripeIxTo = -1;
-    if (LlapIoImpl.LOGL.isDebugEnabled()) {
+    if (LlapIoImpl.LOG.isDebugEnabled()) {
       String tmp = "FileSplit {" + split.getStart() + ", " + split.getLength() + "}; stripes ";
       for (StripeInformation stripe : stripes) {
         tmp += "{" + stripe.getOffset() + ", " + stripe.getLength() + "}, ";
@@ -893,7 +892,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
   }
 
   private class DataWrapperForOrc implements DataReader, DataCache {
-    private DataReader orcDataReader;
+    private final DataReader orcDataReader;
 
     public DataWrapperForOrc() {
       boolean useZeroCopy = (conf != null) && OrcConf.USE_ZEROCOPY.getBoolean(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java
index b1d2cf7..83ccc7f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java
@@ -41,13 +41,13 @@ import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler.AttemptPathIdentifier;
 
 class DirWatcher {
 
-  private static final Log LOG = LogFactory.getLog(DirWatcher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DirWatcher.class);
 
   private static enum Type {
     BASE, // App Base Dir / ${dagDir}

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java
index b23e25e..8a0b86f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java
@@ -18,8 +18,8 @@ import java.io.FileDescriptor;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
@@ -27,7 +27,7 @@ import org.jboss.netty.handler.stream.ChunkedFile;
 
 public class FadvisedChunkedFile extends ChunkedFile {
 
-  private static final Log LOG = LogFactory.getLog(FadvisedChunkedFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FadvisedChunkedFile.class);
 
   private final boolean manageOsCache;
   private final int readaheadLength;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java
index 69ea363..57f29d8 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java
@@ -22,8 +22,8 @@ import java.nio.channels.FileChannel;
 import java.nio.channels.WritableByteChannel;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
@@ -31,7 +31,7 @@ import org.jboss.netty.channel.DefaultFileRegion;
 
 public class FadvisedFileRegion extends DefaultFileRegion {
 
-  private static final Log LOG = LogFactory.getLog(FadvisedFileRegion.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FadvisedFileRegion.class);
 
   private final boolean manageOsCache;
   private final int readaheadLength;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java
index a647a55..786486f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java
@@ -22,8 +22,8 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.tez.runtime.library.common.Constants;
@@ -35,7 +35,7 @@ class IndexCache {
   private final Configuration conf;
   private final int totalMemoryAllowed;
   private AtomicInteger totalMemoryUsed = new AtomicInteger();
-  private static final Log LOG = LogFactory.getLog(IndexCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IndexCache.class);
 
   private final ConcurrentHashMap<String,IndexInformation> cache =
       new ConcurrentHashMap<String,IndexInformation>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
index 46fd975..762f069 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
@@ -58,8 +58,8 @@ import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
 import com.google.common.cache.Weigher;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
@@ -115,7 +115,7 @@ import org.jboss.netty.util.CharsetUtil;
 
 public class ShuffleHandler implements AttemptRegistrationListener {
 
-  private static final Log LOG = LogFactory.getLog(ShuffleHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ShuffleHandler.class);
 
   public static final String SHUFFLE_HANDLER_LOCAL_DIRS = "llap.shuffle.handler.local-dirs";
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java b/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java
index 5428305..066fae5 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java
@@ -24,8 +24,8 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.llap.LlapNodeId;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto;
@@ -40,7 +40,7 @@ import org.apache.tez.runtime.api.impl.InputSpec;
 
 public class SourceStateTracker {
 
-  private static final Log LOG = LogFactory.getLog(SourceStateTracker.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SourceStateTracker.class);
 
   private final TaskCommunicatorContext taskCommunicatorContext;
   private final LlapTaskCommunicator taskCommunicator;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java
index d4d4bb2..6d21997 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java
@@ -27,8 +27,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.FutureTask;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.io.Allocator.AllocatorOutOfMemoryException;
 import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics;
 import org.junit.Test;
 
 public class TestBuddyAllocator {
-  private static final Log LOG = LogFactory.getLog(TestBuddyAllocator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestBuddyAllocator.class);
   private final Random rdm = new Random(2284);
 
   private static class DummyMemoryManager implements MemoryManager {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
index ef51869..7be82c2 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
@@ -26,8 +26,8 @@ import java.util.List;
 import java.util.ArrayList;
 import java.util.LinkedHashSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator;
 import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator;
 import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata;
@@ -61,7 +61,7 @@ import org.mockito.Mockito;
 import com.google.protobuf.CodedOutputStream;
 
 public class TestIncrementalObjectSizeEstimator {
-  private static final Log LOG = LogFactory.getLog(TestIncrementalObjectSizeEstimator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestIncrementalObjectSizeEstimator.class);
 
   private static class DummyMetadataReader implements MetadataReader  {
     public boolean doStreamStep = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java
index 1e673ad..8324b21 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java
@@ -31,8 +31,8 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.FutureTask;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.io.DiskRange;
 import org.apache.hadoop.hive.common.io.DiskRangeList;
 import org.apache.hadoop.hive.common.io.DataCache.DiskRangeListFactory;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.ql.io.orc.encoded.CacheChunk;
 import org.junit.Test;
 
 public class TestLowLevelCacheImpl {
-  private static final Log LOG = LogFactory.getLog(TestLowLevelCacheImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestLowLevelCacheImpl.class);
 
   private static final DiskRangeListFactory testFactory = new DiskRangeListFactory() {
     public DiskRangeList createCacheChunk(MemoryBuffer buffer, long offset, long end) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java
index bb815e3..bb530ef 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java
@@ -26,8 +26,8 @@ import java.util.List;
 import java.util.Random;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority;
@@ -36,7 +36,7 @@ import org.junit.Assume;
 import org.junit.Test;
 
 public class TestLowLevelLrfuCachePolicy {
-  private static final Log LOG = LogFactory.getLog(TestLowLevelLrfuCachePolicy.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestLowLevelLrfuCachePolicy.class);
 
   @Test
   public void testRegression_HIVE_12178() throws Exception {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java
index 2886d54..b886d77 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hive.llap.cache;
 
 import static org.junit.Assert.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority;
 import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata;
 import org.apache.hadoop.hive.llap.io.metadata.OrcMetadataCache;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
index df967c4..4525ab9 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
@@ -20,8 +20,8 @@ import java.net.InetSocketAddress;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
@@ -36,7 +36,7 @@ import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
 import com.google.common.base.Preconditions;
 
 public class MiniLlapCluster extends AbstractService {
-  private static final Log LOG = LogFactory.getLog(MiniLlapCluster.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MiniLlapCluster.class);
 
   private final File testWorkDir;
   private final long execBytesPerService;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/pom.xml
----------------------------------------------------------------------
diff --git a/metastore/pom.xml b/metastore/pom.xml
index 4cd1e6d..e3f825b 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -75,11 +75,6 @@
       <artifactId>commons-lang</artifactId>
       <version>${commons-lang.version}</version>
     </dependency>
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <version>${commons-logging.version}</version>
-    </dependency>
     <!-- XXX we probably shouldn't be shipping this but the tests depend on it -->
     <dependency>
       <groupId>org.apache.derby</groupId>
@@ -153,7 +148,15 @@
       <artifactId>tephra-core</artifactId>
       <version>${tephra.version}</version>
       <exclusions>
+          <exclusion>
+          <groupId>ch.qos.logback</groupId>
+          <artifactId>logback-classic</artifactId>
+        </exclusion>
         <exclusion>
+          <groupId>ch.qos.logback</groupId>
+          <artifactId>logback-core</artifactId>
+        </exclusion>
+       <exclusion>
           <groupId>org.ow2.asm</groupId>
           <artifactId>asm-all</artifactId>
         </exclusion>

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
index 65e2c65..58c9f9e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
@@ -19,8 +19,8 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
@@ -41,7 +41,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 public class AggregateStatsCache {
 
-  private static final Log LOG = LogFactory.getLog(AggregateStatsCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(AggregateStatsCache.class.getName());
   private static AggregateStatsCache self = null;
 
   // Backing store for this cache
@@ -49,7 +49,7 @@ public class AggregateStatsCache {
   // Cache size
   private final int maxCacheNodes;
   // Current nodes in the cache
-  private AtomicInteger currentNodes = new AtomicInteger(0);
+  private final AtomicInteger currentNodes = new AtomicInteger(0);
   // Run the cleaner thread when the cache is maxFull% full
   private final float maxFull;
   // Run the cleaner thread until cache is cleanUntil% occupied
@@ -68,8 +68,8 @@ public class AggregateStatsCache {
   private final float maxVariance;
   // Used to determine if cleaner thread is already running
   private boolean isCleaning = false;
-  private AtomicLong cacheHits = new AtomicLong(0);
-  private AtomicLong cacheMisses = new AtomicLong(0);
+  private final AtomicLong cacheHits = new AtomicLong(0);
+  private final AtomicLong cacheMisses = new AtomicLong(0);
   // To track cleaner metrics
   int numRemovedTTL = 0, numRemovedLRU = 0;
 
@@ -196,7 +196,7 @@ public class AggregateStatsCache {
         cacheMisses.incrementAndGet();
       }
     } catch (InterruptedException e) {
-      LOG.debug(e);
+      LOG.debug("Interrupted Exception ignored ",e);
     } finally {
       if (isLocked) {
         candidateList.readLock.unlock();
@@ -227,7 +227,7 @@ public class AggregateStatsCache {
     // Note: we're not creating a copy of the list for saving memory
     for (AggrColStats candidate : candidates) {
       // Variance check
-      if ((float) Math.abs((candidate.getNumPartsCached() - numPartsRequested) / numPartsRequested)
+      if (Math.abs((candidate.getNumPartsCached() - numPartsRequested) / numPartsRequested)
           > maxVariance) {
         continue;
       }
@@ -309,7 +309,7 @@ public class AggregateStatsCache {
         currentNodes.getAndIncrement();
       }
     } catch (InterruptedException e) {
-      LOG.debug(e);
+      LOG.debug("Interrupted Exception ignored ", e);
     } finally {
       if (isLocked) {
         nodeList.writeLock.unlock();
@@ -342,9 +342,9 @@ public class AggregateStatsCache {
           Iterator<Map.Entry<Key, AggrColStatsList>> mapIterator = cacheStore.entrySet().iterator();
           while (mapIterator.hasNext()) {
             Map.Entry<Key, AggrColStatsList> pair =
-                (Map.Entry<Key, AggrColStatsList>) mapIterator.next();
+                mapIterator.next();
             AggrColStats node;
-            AggrColStatsList candidateList = (AggrColStatsList) pair.getValue();
+            AggrColStatsList candidateList = pair.getValue();
             List<AggrColStats> nodes = candidateList.nodes;
             if (nodes.size() == 0) {
               mapIterator.remove();
@@ -365,7 +365,7 @@ public class AggregateStatsCache {
                 }
               }
             } catch (InterruptedException e) {
-              LOG.debug(e);
+              LOG.debug("Interrupted Exception ignored ",e);
             } finally {
               if (isLocked) {
                 candidateList.writeLock.unlock();
@@ -453,7 +453,7 @@ public class AggregateStatsCache {
         numRemovedLRU++;
       }
     } catch (InterruptedException e) {
-      LOG.debug(e);
+      LOG.debug("Interrupted Exception ignored ",e);
     } finally {
       if (isLocked) {
         candidateList.writeLock.unlock();
@@ -508,11 +508,11 @@ public class AggregateStatsCache {
   static class AggrColStatsList {
     // TODO: figure out a better data structure for node list(?)
     private List<AggrColStats> nodes = new ArrayList<AggrColStats>();
-    private ReadWriteLock lock = new ReentrantReadWriteLock();
+    private final ReadWriteLock lock = new ReentrantReadWriteLock();
     // Read lock for get operation
-    private Lock readLock = lock.readLock();
+    private final Lock readLock = lock.readLock();
     // Write lock for add, evict and clean operation
-    private Lock writeLock = lock.writeLock();
+    private final Lock writeLock = lock.writeLock();
     // Using volatile instead of locking updates to this variable,
     // since we can rely on approx lastAccessTime but don't want a performance hit
     private volatile long lastAccessTime = 0;
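
The LOG.debug(e) calls in this class could not be carried over as-is: org.slf4j.Logger takes a String message rather than an arbitrary Object, and the Throwable goes last so its stack trace is recorded. An illustrative sketch (ExceptionLoggingExample and acquire() are invented names):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ExceptionLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingExample.class);

      void acquire() {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          // slf4j: message first, Throwable as the final argument, stack trace included.
          LOG.debug("Interrupted Exception ignored ", e);
        }
      }
    }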

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
index 82a537a..f29d453 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 
 /**
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
  * It is recommended to use it as a ThreadLocal variable.
  */
 public class Deadline {
-  private static final Log LOG = LogFactory.getLog(Deadline.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(Deadline.class.getName());
 
   /**
    * its value is init from conf, and could be reset from client.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 45f3515..628c37d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hive.metastore;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -55,7 +55,7 @@ import java.util.List;
 public class HiveAlterHandler implements AlterHandler {
 
   protected Configuration hiveConf;
-  private static final Log LOG = LogFactory.getLog(HiveAlterHandler.class
+  private static final Logger LOG = LoggerFactory.getLogger(HiveAlterHandler.class
       .getName());
 
   @Override
@@ -242,12 +242,12 @@ public class HiveAlterHandler implements AlterHandler {
       // commit the changes
       success = msdb.commitTransaction();
     } catch (InvalidObjectException e) {
-      LOG.debug(e);
+      LOG.debug("Failed to get object from Metastore ", e);
       throw new InvalidOperationException(
           "Unable to change partition or table."
               + " Check metastore logs for detailed stack." + e.getMessage());
     } catch (NoSuchObjectException e) {
-      LOG.debug(e);
+      LOG.debug("Object not found in metastore ", e);
       throw new InvalidOperationException(
           "Unable to change partition or table. Database " + dbname + " does not exist"
               + " Check metastore logs for detailed stack." + e.getMessage());
@@ -402,7 +402,7 @@ public class HiveAlterHandler implements AlterHandler {
             Warehouse.makePartName(tbl.getPartitionKeys(), new_part.getValues()));
           destPath = constructRenamedPath(destPath, new Path(new_part.getSd().getLocation()));
         } catch (NoSuchObjectException e) {
-          LOG.debug(e);
+          LOG.debug("Didn't find object in metastore ", e);
           throw new InvalidOperationException(
             "Unable to change partition or table. Database " + dbname + " does not exist"
               + " Check metastore logs for detailed stack." + e.getMessage());

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 8ed4310..cf2e25b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -28,8 +28,8 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Multimaps;
 
 import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -146,7 +146,7 @@ import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName;
  * TODO:pc remove application logic to a separate interface.
  */
 public class HiveMetaStore extends ThriftHiveMetastore {
-  public static final Log LOG = LogFactory.getLog(HiveMetaStore.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStore.class);
 
   // boolean that tells if the HiveMetaStore (remote) server is being used.
   // Can be used to determine if the calls to metastore api (HMSHandler) are being made with
@@ -197,7 +197,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   }
 
   public static class HMSHandler extends FacebookBase implements IHMSHandler {
-    public static final Log LOG = HiveMetaStore.LOG;
+    public static final Logger LOG = HiveMetaStore.LOG;
     private String rawStoreClassName;
     private final HiveConf hiveConf; // stores datastore (jpox) properties,
                                      // right now they come from jpox.properties
@@ -242,7 +242,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         "ugi=%s\t" + // ugi
             "ip=%s\t" + // remote IP
             "cmd=%s\t"; // command
-    public static final Log auditLog = LogFactory.getLog(
+    public static final Logger auditLog = LoggerFactory.getLogger(
         HiveMetaStore.class.getName() + ".audit");
     private static final ThreadLocal<Formatter> auditFormatter =
         new ThreadLocal<Formatter>() {
@@ -509,7 +509,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
           rs.setConf(conf);
           return rs;
         } catch (Exception e) {
-          LOG.fatal("Unable to instantiate raw store directly in fastpath mode");
+          LOG.error("Unable to instantiate raw store directly in fastpath mode", e);
           throw new RuntimeException(e);
         }
       }
@@ -5147,7 +5147,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         }
       } catch (Exception original) {
         ex = original;
-        LOG.error(original);
+        LOG.error("Exception caught in mark partition event ", original);
         if (original instanceof NoSuchObjectException) {
           throw (NoSuchObjectException) original;
         } else if (original instanceof UnknownTableException) {
@@ -5180,7 +5180,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       try {
         ret = getMS().isPartitionMarkedForEvent(db_name, tbl_name, partName, evtType);
       } catch (Exception original) {
-        LOG.error(original);
+        LOG.error("Exception caught for isPartitionMarkedForEvent ",original);
         ex = original;
         if (original instanceof NoSuchObjectException) {
           throw (NoSuchObjectException) original;
@@ -6197,8 +6197,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       houseKeeper.start(conf);
     }
     catch (Exception ex) {
-      LOG.fatal("Failed to start " + houseKeeper.getClass() +
-        ".  The system will not handle " + houseKeeper.getServiceDescription()  +
+      LOG.error("Failed to start {}" , houseKeeper.getClass() +
+        ".  The system will not handle {} " , houseKeeper.getServiceDescription(),
         ".  Root Cause: ", ex);
     }
   }
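
The LOG.fatal calls above become LOG.error because org.slf4j.Logger exposes only trace, debug, info, warn and error. With the varargs form, each {} placeholder binds to one leading argument and a trailing Throwable is logged with its stack trace; a sketch of that fully parameterized shape, with invented names:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class StartupLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(StartupLoggingExample.class);

      void reportStartFailure(Object houseKeeper, String description, Exception ex) {
        // Two placeholders bound to two arguments; the trailing Throwable gets its stack trace logged.
        LOG.error("Failed to start {}; the system will not handle {}",
            houseKeeper.getClass(), description, ex);
      }
    }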

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3105a09..3960f5d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -47,8 +47,8 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.security.auth.login.LoginException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -187,7 +187,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
   private int retries = 5;
   private long retryDelaySeconds = 0;
 
-  static final protected Log LOG = LogFactory.getLog("hive.metastore");
+  static final protected Logger LOG = LoggerFactory.getLogger("hive.metastore");
 
   public HiveMetaStoreClient(HiveConf conf)
     throws MetaException {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
index b255090..9e7dcfc 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.metastore;
 
 import java.io.FileNotFoundException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,8 +32,8 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 
 public class HiveMetaStoreFsImpl implements MetaStoreFS {
 
-  public static final Log LOG = LogFactory
-      .getLog("hive.metastore.hivemetastoressimpl");
+  public static final Logger LOG = LoggerFactory
+      .getLogger("hive.metastore.hivemetastoressimpl");
 
   @Override
   public boolean deleteDir(FileSystem fs, Path f, boolean recursive,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 3c1c616..08153ca 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -38,8 +38,8 @@ import javax.jdo.Transaction;
 import javax.jdo.datastore.JDOConnection;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -95,7 +95,7 @@ class MetaStoreDirectSql {
 
   private static final int NO_BATCHING = -1, DETECT_BATCHING = 0;
 
-  private static final Log LOG = LogFactory.getLog(MetaStoreDirectSql.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MetaStoreDirectSql.class);
   private final PersistenceManager pm;
   /**
    * We want to avoid db-specific code in this class and stick with ANSI SQL. However:

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java
index 4a139a9..6123a1e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  */
 public class MetaStoreInit {
 
-  private static final Log LOG = LogFactory.getLog(MetaStoreInit.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MetaStoreInit.class);
 
   static class MetaStoreInitData {
     JDOConnectionURLHook urlHook = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 3fde18e..73b7574 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -44,8 +44,8 @@ import com.google.common.base.Predicates;
 import com.google.common.collect.Maps;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -87,7 +87,7 @@ import javax.annotation.Nullable;
 
 public class MetaStoreUtils {
 
-  protected static final Log LOG = LogFactory.getLog("hive.log");
+  protected static final Logger LOG = LoggerFactory.getLogger("hive.log");
 
   public static final String DEFAULT_DATABASE_NAME = "default";
   public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";

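MetaStoreUtils logs to the named logger "hive.log" rather than a class-based one; LoggerFactory.getLogger(String) mirrors LogFactory.getLog(String), so named loggers carry over unchanged. A small self-contained sketch, with an illustrative class name and message:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class NamedLoggerExample {
      // Same logger name the file above uses; the underlying logging backend
      // still sees the identical name, so configuration keyed on "hive.log"
      // is unaffected by which facade obtains the logger.
      protected static final Logger LOG = LoggerFactory.getLogger("hive.log");

      public static void main(String[] args) {
        LOG.info("named loggers are resolved by string, not by class");
      }
    }
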
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 0f98963..9f2f5f4 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -57,8 +57,8 @@ import javax.jdo.identity.IntIdentity;
 import com.google.common.annotations.VisibleForTesting;
 import org.antlr.runtime.CommonTokenStream;
 import org.antlr.runtime.RecognitionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -179,7 +179,7 @@ public class ObjectStore implements RawStore, Configurable {
   * Verify the schema only once per JVM since the db connection info is static
   */
   private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false);
-  private static final Log LOG = LogFactory.getLog(ObjectStore.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class.getName());
 
   private static enum TXN_STATUS {
     NO_STATE, OPEN, COMMITED, ROLLBACK
@@ -499,14 +499,14 @@ public class ObjectStore implements RawStore, Configurable {
       RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = "
           + openTrasactionCalls + ". This probably indicates that there are unbalanced " +
           "calls to openTransaction/commitTransaction");
-      LOG.error(e);
+      LOG.error("Unbalanced calls to open/commit Transaction", e);
       throw e;
     }
     if (!currentTransaction.isActive()) {
       RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = "
           + openTrasactionCalls + ". This probably indicates that there are unbalanced " +
           "calls to openTransaction/commitTransaction");
-      LOG.error(e);
+      LOG.error("Unbalanced calls to open/commit Transaction", e);
       throw e;
     }
     openTrasactionCalls--;
@@ -7695,7 +7695,7 @@ public class ObjectStore implements RawStore, Configurable {
         classLoaderResolverMap.set(nc, new HashMap<String, ClassLoaderResolver>());
         LOG.debug("Removed cached classloaders from DataNucleus NucleusContext");
       } catch (Exception e) {
-        LOG.warn(e);
+        LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext ", e);
       }
     }
   }

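The two LOG.error changes and the LOG.warn change in ObjectStore are where this migration is more than an import swap: commons-logging's Log.error(Object) accepted a bare Throwable (typically rendering only its toString()), while slf4j's Logger has no error(Throwable) overload, so the exception must be passed after a message string to keep the stack trace. A self-contained sketch of the new call shape; the class name and exception text are illustrative only:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingExample.class);

      public static void main(String[] args) {
        try {
          throw new RuntimeException("unbalanced openTransaction/commitTransaction");
        } catch (RuntimeException e) {
          // LOG.error(e) no longer compiles against slf4j; supplying a message
          // plus the Throwable lets the binding print the full stack trace.
          LOG.error("Unbalanced calls to open/commit Transaction", e);
          throw e;
        }
      }
    }
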
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java b/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
index 5766bdd..e38e8dd 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.metastore;
 
 import org.antlr.runtime.CommonTokenStream;
 import org.antlr.runtime.RecognitionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
  * Utility functions for working with partition filter expressions
  */
 public class PartFilterExprUtil {
-  private static final Log LOG = LogFactory.getLog(PartFilterExprUtil.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(PartFilterExprUtil.class.getName());
 
 
   public static ExpressionTree makeExpressionTree(PartitionExpressionProxy expressionProxy,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
index c3755ef..f28e232 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
@@ -27,8 +27,6 @@ import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.ClassUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
@@ -40,12 +38,9 @@ import org.apache.hadoop.util.ReflectionUtils;
 @InterfaceStability.Evolving
 public class RawStoreProxy implements InvocationHandler {
 
-  static final private Log LOG = LogFactory.getLog(RawStoreProxy.class.getName());
-
   private final RawStore base;
   private final MetaStoreInit.MetaStoreInitData metaStoreInitData =
     new MetaStoreInit.MetaStoreInitData();
-  private final int id;
   private final HiveConf hiveConf;
   private final Configuration conf; // thread local conf from HMS
 
@@ -53,7 +48,6 @@ public class RawStoreProxy implements InvocationHandler {
       Class<? extends RawStore> rawStoreClass, int id) throws MetaException {
     this.conf = conf;
     this.hiveConf = hiveConf;
-    this.id = id;
 
     // This has to be called before initializing the instance of RawStore
     init();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
index 56276b6..f01849d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
@@ -26,8 +26,8 @@ import java.lang.reflect.UndeclaredThrowableException;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
@@ -41,7 +41,7 @@ import org.datanucleus.exceptions.NucleusException;
 @InterfaceStability.Evolving
 public class RetryingHMSHandler implements InvocationHandler {
 
-  private static final Log LOG = LogFactory.getLog(RetryingHMSHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RetryingHMSHandler.class);
   private static final String CLASS_NAME = RetryingHMSHandler.class.getName();
 
   private static class Result {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
index 5087098..2b05837 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
@@ -27,8 +27,8 @@ import java.lang.reflect.UndeclaredThrowableException;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -48,7 +48,7 @@ import org.apache.thrift.transport.TTransportException;
 @Public
 public class RetryingMetaStoreClient implements InvocationHandler {
 
-  private static final Log LOG = LogFactory.getLog(RetryingMetaStoreClient.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(RetryingMetaStoreClient.class.getName());
 
   private final IMetaStoreClient base;
   private final int retryLimit;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java b/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
index ec8d608..89f4701 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
@@ -25,8 +25,8 @@ import java.security.PrivilegedExceptionAction;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_args;
@@ -57,7 +57,7 @@ public class TUGIBasedProcessor<I extends Iface> extends TSetIpAddressProcessor<
   private final I iface;
   private final Map<String,  org.apache.thrift.ProcessFunction<Iface, ? extends  TBase>>
     functions;
-  static final Log LOG = LogFactory.getLog(TUGIBasedProcessor.class);
+  static final Logger LOG = LoggerFactory.getLogger(TUGIBasedProcessor.class);
 
   public TUGIBasedProcessor(I iface) throws SecurityException, NoSuchFieldException,
     IllegalArgumentException, IllegalAccessException, NoSuchMethodException,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index 7aab2c7..d616946 100755
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -34,8 +34,8 @@ import java.util.regex.Pattern;
 
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
@@ -63,7 +63,7 @@ public class Warehouse {
   private final Configuration conf;
   private final String whRootString;
 
-  public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse");
+  public static final Logger LOG = LoggerFactory.getLogger("hive.metastore.warehouse");
 
   private MetaStoreFS fsHandler = null;
   private boolean storageAuthCheck = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java
index df82bce..7f99f18 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hive.metastore.events;
 
 import java.util.TimerTask;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
 import org.apache.hadoop.hive.metastore.RawStore;
 
 public class EventCleanerTask extends TimerTask{
 
-  public static final Log LOG = LogFactory.getLog(EventCleanerTask.class);
+  public static final Logger LOG = LoggerFactory.getLogger(EventCleanerTask.class);
   private final HMSHandler handler;
 
   public EventCleanerTask(HMSHandler handler) {
@@ -46,7 +46,7 @@ public class EventCleanerTask extends TimerTask{
         LOG.info("Number of events deleted from event Table: "+deleteCnt);
       }
     } catch (Exception e) {
-      LOG.error(e);
+      LOG.error("Exception while trying to delete events ", e);
     }
   }
 }

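Not part of this diff, but worth noting while in EventCleanerTask: the retained LOG.info("Number of events deleted from event Table: "+deleteCnt) still concatenates eagerly, whereas slf4j's {} placeholders defer formatting until the level is known to be enabled. A hedged sketch of that alternative follows; the class name and count value are made up for illustration:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class PlaceholderLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(PlaceholderLoggingExample.class);

      public static void main(String[] args) {
        long deleteCnt = 42;  // illustrative value only
        // Same output as concatenation, but the message is only assembled
        // when INFO is actually enabled for this logger.
        LOG.info("Number of events deleted from event Table: {}", deleteCnt);
      }
    }
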
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
index 89c3e7b..4ca4229 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -36,8 +36,8 @@ import java.util.List;
  * Filter for scanning aggregates stats table
  */
 public class AggrStatsInvalidatorFilter extends FilterBase {
-  private static final Log LOG =
-      LogFactory.getLog(AggrStatsInvalidatorFilter.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AggrStatsInvalidatorFilter.class.getName());
   private final List<HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> entries;
   private final long runEvery;
   private final long maxCacheEntryLife;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
index fac8e90..ba5cb22 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
@@ -26,8 +26,8 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Deadline;
@@ -73,7 +73,7 @@ import java.util.concurrent.TimeUnit;
  */
 public class HBaseImport {
 
-  static final private Log LOG = LogFactory.getLog(HBaseImport.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(HBaseImport.class.getName());
 
   public static int main(String[] args) {
     try {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index ffd3ee5..1ac060b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.metastore.hbase;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -143,7 +143,7 @@ public class HBaseReadWrite {
   @VisibleForTesting final static String TEST_CONN = "test_connection";
   private static HBaseConnection testConn;
 
-  static final private Log LOG = LogFactory.getLog(HBaseReadWrite.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(HBaseReadWrite.class.getName());
 
   private static ThreadLocal<HBaseReadWrite> self = new ThreadLocal<HBaseReadWrite>() {
     @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 67a02d9..5cc7c30 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -22,8 +22,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.CacheLoader;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -92,7 +92,7 @@ import java.util.Set;
  * Implementation of RawStore that stores data in HBase
  */
 public class HBaseStore implements RawStore {
-  static final private Log LOG = LogFactory.getLog(HBaseStore.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(HBaseStore.class.getName());
 
   // Do not access this directly, call getHBase to make sure it is initialized.
   private HBaseReadWrite hbase = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
index 1885089..f4f30d7 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
@@ -23,8 +23,8 @@ import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
@@ -96,7 +96,7 @@ class HBaseUtils {
   final static char KEY_SEPARATOR = '\u0001';
   final static String KEY_SEPARATOR_STR = new String(new char[] {KEY_SEPARATOR});
 
-  static final private Log LOG = LogFactory.getLog(HBaseUtils.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(HBaseUtils.class.getName());
 
   /**
    * Build a key for an object in hbase

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java
index 01fe403..2b0863d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java
@@ -24,8 +24,8 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type;
@@ -43,7 +43,7 @@ import org.apache.hadoop.io.BytesWritable;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 public class PartitionKeyComparator extends ByteArrayComparable {
-  private static final Log LOG = LogFactory.getLog(PartitionKeyComparator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PartitionKeyComparator.class);
   static class Mark {
     Mark(String value, boolean inclusive) {
       this.value = value;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java
index d772dca..de3b17b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -41,7 +41,7 @@ import java.util.List;
  * copies.
  */
 public class SharedStorageDescriptor extends StorageDescriptor {
-  static final private Log LOG = LogFactory.getLog(SharedStorageDescriptor.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(SharedStorageDescriptor.class.getName());
   private boolean colsCopied = false;
   private boolean serdeCopied = false;
   private boolean bucketsCopied = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java
index 42efe94..5ec60be 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java
@@ -23,8 +23,8 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.protobuf.ByteString;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
@@ -52,7 +52,7 @@ import java.util.concurrent.locks.ReentrantLock;
  */
 class StatsCache {
 
-  private static final Log LOG = LogFactory.getLog(StatsCache.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(StatsCache.class.getName());
   private static StatsCache self = null;
 
   private LoadingCache<StatsCacheKey, AggrStats> cache;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
index f9c6e73..f66200f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java
@@ -28,8 +28,8 @@ import co.cask.tephra.distributed.TransactionServiceClient;
 import co.cask.tephra.hbase10.TransactionAwareHTable;
 import co.cask.tephra.hbase10.coprocessor.TransactionProcessor;
 import co.cask.tephra.inmemory.InMemoryTxSystemClient;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -44,7 +44,7 @@ import java.util.Map;
  * A class that uses Tephra for transaction management.
  */
 public class TephraHBaseConnection extends VanillaHBaseConnection {
-  static final private Log LOG = LogFactory.getLog(TephraHBaseConnection.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(TephraHBaseConnection.class.getName());
 
   private Map<String, TransactionAware> txnTables;
   private TransactionContext txn;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java
index 25334a3..e631580 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -40,7 +40,7 @@ import java.util.Map;
  * A pass through to a simple HBase connection.  This has no transactions.
  */
 public class VanillaHBaseConnection implements HBaseConnection {
-  static final private Log LOG = LogFactory.getLog(VanillaHBaseConnection.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(VanillaHBaseConnection.class.getName());
 
   protected HConnection conn;
   protected Map<String, HTableInterface> tables;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
index 6a1b315..7e94e34 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.metastore.partition.spec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
index e4e9e3a..22e246f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
@@ -33,8 +33,8 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.ObjectStore;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hive.metastore.ObjectStore;
 
 public class HiveMetaTool {
 
-  private static final Log LOG = LogFactory.getLog(HiveMetaTool.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName());
   private final Options cmdLineOptions = new Options();
   private ObjectStore objStore;
   private boolean isObjStoreInitialized;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index 44ee5c6..4d9e8ae 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -34,7 +34,7 @@ import java.util.*;
  */
 public class CompactionTxnHandler extends TxnHandler {
   static final private String CLASS_NAME = CompactionTxnHandler.class.getName();
-  static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+  static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   // Always access COMPACTION_QUEUE before COMPLETED_TXN_COMPONENTS
   // See TxnHandler for notes on how to deal with deadlocks.  Follow those notes.