Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2017/07/24 02:15:30 UTC

[1/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 c12bf9a12 -> 7e583a382


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
index bff5eec..5b32798 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
@@ -23,9 +23,9 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is a file handle use by the NFS clients.
@@ -33,7 +33,7 @@ import org.apache.hadoop.oncrpc.XDR;
  * on subsequent operations to reference the file.
  */
 public class FileHandle {
-  private static final Log LOG = LogFactory.getLog(FileHandle.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileHandle.class);
   private static final String HEXES = "0123456789abcdef";
   private static final int HANDLE_LEN = 32;
   private byte[] handle; // Opaque handle
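
The hunk above shows the mechanical pattern repeated throughout this commit: the commons-logging Log/LogFactory pair is replaced by org.slf4j.Logger/LoggerFactory. As an aside (a minimal sketch, not part of the commit; the class name is hypothetical), the slf4j API also allows {} placeholders, which defer message construction until the level is enabled:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the migration pattern applied in the diffs above.
public class Slf4jMigrationSketch {
  // Before: private static final Log LOG = LogFactory.getLog(Slf4jMigrationSketch.class);
  private static final Logger LOG =
      LoggerFactory.getLogger(Slf4jMigrationSketch.class);

  public static void main(String[] args) {
    // {} placeholders defer string construction until DEBUG is enabled,
    // so explicit isDebugEnabled() guards around simple messages become optional.
    LOG.debug("Handle length is {} bytes", 32);
  }
}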

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
index 80faca5..00e6d9f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
@@ -17,13 +17,13 @@
  */
 package org.apache.hadoop.nfs.nfs3;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.SimpleTcpServer;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.util.ExitUtil.terminate;
 
@@ -32,7 +32,7 @@ import static org.apache.hadoop.util.ExitUtil.terminate;
  * Only TCP server is supported and UDP is not supported.
  */
 public abstract class Nfs3Base {
-  public static final Log LOG = LogFactory.getLog(Nfs3Base.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Nfs3Base.class);
   private final RpcProgram rpcProgram;
   private int nfsBoundPort; // Will set after server starts
 
@@ -54,7 +54,7 @@ public abstract class Nfs3Base {
       try {
         rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
       } catch (Throwable e) {
-        LOG.fatal("Failed to register the NFSv3 service.", e);
+        LOG.error("Failed to register the NFSv3 service.", e);
         terminate(1, e);
       }
     }
@@ -67,7 +67,7 @@ public abstract class Nfs3Base {
     try {
       tcpServer.run();
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the TCP server.", e);
+      LOG.error("Failed to start the TCP server.", e);
       if (tcpServer.getBoundPort() > 0) {
         rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP,
             tcpServer.getBoundPort());
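
The fatal-to-error changes above follow from org.slf4j.Logger having no fatal() method; the terminate(1, e) call that follows still exits the process, so the operational severity is unchanged. If a FATAL-like distinction were still wanted, an slf4j Marker is the usual route. A hypothetical sketch, not part of the commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Sketch only: tag fatal-style events with a Marker since slf4j has no FATAL level.
public class FatalToErrorSketch {
  private static final Logger LOG = LoggerFactory.getLogger(FatalToErrorSketch.class);
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  public static void main(String[] args) {
    LOG.error(FATAL, "Failed to register the NFSv3 service.", new Exception("demo"));
  }
}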

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
index 7ba37c9..c8528ba 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
@@ -19,18 +19,19 @@ package org.apache.hadoop.oncrpc;
 
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.MessageEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple client that registers an RPC program with portmap.
  */
 public class RegistrationClient extends SimpleTcpClient {
-  public static final Log LOG = LogFactory.getLog(RegistrationClient.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RegistrationClient.class);
 
   public RegistrationClient(String host, int port, XDR request) {
     super(host, port, request);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
index aa4b948..0ae3c23 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.security.Credentials;
 import org.apache.hadoop.oncrpc.security.Verifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents an RPC message of type RPC call as defined in RFC 1831
  */
 public class RpcCall extends RpcMessage {
   public static final int RPC_VERSION = 2;
-  private static final Log LOG = LogFactory.getLog(RpcCall.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RpcCall.class);
   
   public static RpcCall read(XDR xdr) {
     return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
@@ -60,7 +60,7 @@ public class RpcCall extends RpcMessage {
     this.credentials = credential;
     this.verifier = verifier;
     if (LOG.isTraceEnabled()) {
-      LOG.trace(this);
+      LOG.trace(this.toString());
     }
     validate();
   }
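
The explicit toString() added above is needed because commons-logging's trace(Object) accepted any object, while slf4j's trace() takes a String message. A minimal sketch (hypothetical class; the parameterized form at the end is an equivalent alternative the commit does not use):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the trace(Object) -> trace(String) migration shown in the hunk above.
public class TraceMigrationSketch {
  private static final Logger LOG = LoggerFactory.getLogger(TraceMigrationSketch.class);

  @Override
  public String toString() {
    return "TraceMigrationSketch{}";
  }

  public static void main(String[] args) {
    TraceMigrationSketch call = new TraceMigrationSketch();
    if (LOG.isTraceEnabled()) {
      LOG.trace(call.toString());   // pattern used in the diff
    }
    LOG.trace("{}", call);          // parameterized alternative; toString() is deferred, no guard needed
  }
}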

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
index c541cd6..5c059aa 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
@@ -23,8 +23,6 @@ import java.net.InetSocketAddress;
 import java.net.SocketAddress;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
 import org.apache.hadoop.oncrpc.security.Verifier;
 import org.apache.hadoop.oncrpc.security.VerifierNone;
@@ -35,13 +33,15 @@ import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class for writing RPC server programs based on RFC 1050. Extend this class
  * and implement {@link #handleInternal} to handle the requests received.
  */
 public abstract class RpcProgram extends SimpleChannelUpstreamHandler {
-  static final Log LOG = LogFactory.getLog(RpcProgram.class);
+  static final Logger LOG = LoggerFactory.getLogger(RpcProgram.class);
   public static final int RPCB_PORT = 111;
   private final String program;
   private final String host;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
index cbc9943..cebebd2 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
@@ -29,6 +27,8 @@ import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.handler.codec.frame.FrameDecoder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class RpcUtil {
   /**
@@ -63,7 +63,8 @@ public final class RpcUtil {
    * each RPC client.
    */
   static class RpcFrameDecoder extends FrameDecoder {
-    public static final Log LOG = LogFactory.getLog(RpcFrameDecoder.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(RpcFrameDecoder.class);
     private ChannelBuffer currentFrame;
 
     @Override
@@ -107,8 +108,8 @@ public final class RpcUtil {
    * request into a RpcInfo instance.
    */
   static final class RpcMessageParserStage extends SimpleChannelUpstreamHandler {
-    private static final Log LOG = LogFactory
-        .getLog(RpcMessageParserStage.class);
+    private static final Logger LOG = LoggerFactory
+        .getLogger(RpcMessageParserStage.class);
 
     @Override
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
index b72153a..23b6682 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
@@ -17,20 +17,21 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.ChannelStateEvent;
 import org.jboss.netty.channel.ExceptionEvent;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple TCP based RPC client handler used by {@link SimpleTcpServer}.
  */
 public class SimpleTcpClientHandler extends SimpleChannelHandler {
-  public static final Log LOG = LogFactory.getLog(SimpleTcpClient.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleTcpClient.class);
   protected final XDR request;
 
   public SimpleTcpClientHandler(XDR request) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
index bd48b15..177fa3d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.oncrpc;
 import java.net.InetSocketAddress;
 import java.util.concurrent.Executors;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.bootstrap.ServerBootstrap;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelFactory;
@@ -30,12 +28,15 @@ import org.jboss.netty.channel.ChannelPipelineFactory;
 import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple UDP server implemented using netty.
  */
 public class SimpleTcpServer {
-  public static final Log LOG = LogFactory.getLog(SimpleTcpServer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleTcpServer.class);
   protected final int port;
   protected int boundPort = -1; // Will be set after server starts
   protected final SimpleChannelUpstreamHandler rpcProgram;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
index d691aba..e65003c 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
@@ -20,20 +20,21 @@ package org.apache.hadoop.oncrpc;
 import java.net.InetSocketAddress;
 import java.util.concurrent.Executors;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.bootstrap.ConnectionlessBootstrap;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.DatagramChannelFactory;
 import org.jboss.netty.channel.socket.nio.NioDatagramChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple UDP server implemented based on netty.
  */
 public class SimpleUdpServer {
-  public static final Log LOG = LogFactory.getLog(SimpleUdpServer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleUdpServer.class);
   private final int SEND_BUFFER_SIZE = 65536;
   private final int RECEIVE_BUFFER_SIZE = 65536;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
index fe4350b..64edf48 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
@@ -18,16 +18,16 @@
 package org.apache.hadoop.oncrpc.security;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for all credentials. Currently we only support 3 different types
  * of auth flavors: AUTH_NONE, AUTH_SYS, and RPCSEC_GSS.
  */
 public abstract class Credentials extends RpcAuthInfo {
-  public static final Log LOG = LogFactory.getLog(Credentials.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Credentials.class);
 
   public static Credentials readFlavorAndCredentials(XDR xdr) {
     AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
index 93efba8..4a674e8 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
@@ -19,13 +19,14 @@ package org.apache.hadoop.oncrpc.security;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class SecurityHandler {
-  public static final Log LOG = LogFactory.getLog(SecurityHandler.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SecurityHandler.class);
   
   public abstract String getUser();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
index 7586fda..123999d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
@@ -22,8 +22,6 @@ import java.net.SocketAddress;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.RpcUtil;
 import org.apache.hadoop.util.StringUtils;
@@ -41,12 +39,14 @@ import org.jboss.netty.handler.timeout.IdleStateHandler;
 import org.jboss.netty.util.HashedWheelTimer;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Portmap service for binding RPC protocols. See RFC 1833 for details.
  */
 final class Portmap {
-  private static final Log LOG = LogFactory.getLog(Portmap.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Portmap.class);
   private static final int DEFAULT_IDLE_TIME_MILLISECONDS = 5000;
 
   private ConnectionlessBootstrap udpServer;
@@ -65,7 +65,7 @@ final class Portmap {
       pm.start(DEFAULT_IDLE_TIME_MILLISECONDS,
           new InetSocketAddress(port), new InetSocketAddress(port));
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the server. Cause:", e);
+      LOG.error("Failed to start the server. Cause:", e);
       pm.shutdown();
       System.exit(-1);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
index 67175d0..0bc380f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.portmap;
 
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcInfo;
@@ -39,6 +37,8 @@ import org.jboss.netty.channel.group.ChannelGroup;
 import org.jboss.netty.handler.timeout.IdleState;
 import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler;
 import org.jboss.netty.handler.timeout.IdleStateEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
   static final int PROGRAM = 100000;
@@ -51,7 +51,8 @@ final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
   static final int PMAPPROC_DUMP = 4;
   static final int PMAPPROC_GETVERSADDR = 9;
 
-  private static final Log LOG = LogFactory.getLog(RpcProgramPortmap.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RpcProgramPortmap.class);
 
   private final ConcurrentHashMap<String, PortmapMapping> map = new ConcurrentHashMap<String, PortmapMapping>();
 




[4/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Posted by aa...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
index 5f50998..fdb1073 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.metrics.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The MetricsTimeVaryingRate class is for a rate based metric that
@@ -36,8 +36,8 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingRate extends MetricsBase {
 
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
+  private static final Logger LOG =
+      LoggerFactory.getLogger("org.apache.hadoop.metrics.util");
 
   static class Metrics {
     int numOperations = 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java
index a76acac..5282119 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java
@@ -106,7 +106,7 @@ class MBeanInfoBuilder implements MetricsVisitor {
       }
       ++curRecNo;
     }
-    MetricsSystemImpl.LOG.debug(attrs);
+    MetricsSystemImpl.LOG.debug(attrs.toString());
     MBeanAttributeInfo[] attrsArray = new MBeanAttributeInfo[attrs.size()];
     return new MBeanInfo(name, description, attrs.toArray(attrsArray),
                          null, null, null); // no ops/ctors/notifications

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 001b731..0afd621 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -39,18 +39,18 @@ import org.apache.commons.configuration.Configuration;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Metrics configuration for MetricsSystemImpl
  */
 class MetricsConfig extends SubsetConfiguration {
-  static final Log LOG = LogFactory.getLog(MetricsConfig.class);
+  static final Logger LOG = LoggerFactory.getLogger(MetricsConfig.class);
 
   static final String DEFAULT_FILE_NAME = "hadoop-metrics2.properties";
   static final String PREFIX_DEFAULT = "*.";
@@ -115,7 +115,7 @@ class MetricsConfig extends SubsetConfiguration {
         LOG.info("loaded properties from "+ fname);
         LOG.debug(toString(cf));
         MetricsConfig mc = new MetricsConfig(cf, prefix);
-        LOG.debug(mc);
+        LOG.debug(mc.toString());
         return mc;
       }
       catch (ConfigurationException e) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
index 62498ea..e26092a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
@@ -24,8 +24,6 @@ import java.util.concurrent.*;
 
 import static com.google.common.base.Preconditions.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
@@ -36,13 +34,16 @@ import static org.apache.hadoop.metrics2.util.Contracts.*;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An adapter class for metrics sink and associated filters
  */
 class MetricsSinkAdapter implements SinkQueue.Consumer<MetricsBuffer> {
 
-  private final Log LOG = LogFactory.getLog(MetricsSinkAdapter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MetricsSinkAdapter.class);
   private final String name, description, context;
   private final MetricsSink sink;
   private final MetricsFilter sourceFilter, recordFilter, metricFilter;
@@ -210,7 +211,7 @@ class MetricsSinkAdapter implements SinkQueue.Consumer<MetricsBuffer> {
     stopping = true;
     sinkThread.interrupt();
     if (sink instanceof Closeable) {
-      IOUtils.cleanup(LOG, (Closeable)sink);
+      IOUtils.cleanupWithLogger(LOG, (Closeable)sink);
     }
     try {
       sinkThread.join();
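
The cleanup call above changes because IOUtils.cleanup(...) expects a commons-logging Log; once LOG is an slf4j Logger, the slf4j-accepting cleanupWithLogger overload is used instead. A minimal sketch, assuming the signature implied by its use in the hunk above (org.slf4j.Logger plus Closeable varargs):

import java.io.Closeable;
import java.io.StringReader;

import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of closing a resource quietly while logging failures through slf4j.
public class CleanupWithLoggerSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupWithLoggerSketch.class);

  public static void main(String[] args) {
    Closeable sink = new StringReader("demo");
    // Closes the resource and logs any IOException through the slf4j logger.
    IOUtils.cleanupWithLogger(LOG, sink);
  }
}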

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
index 04cd88c..a22018a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
@@ -33,8 +33,6 @@ import static com.google.common.base.Preconditions.*;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsFilter;
@@ -43,6 +41,8 @@ import org.apache.hadoop.metrics2.MetricsTag;
 import static org.apache.hadoop.metrics2.impl.MetricsConfig.*;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.metrics2.util.Contracts.*;
 
@@ -51,7 +51,8 @@ import static org.apache.hadoop.metrics2.util.Contracts.*;
  */
 class MetricsSourceAdapter implements DynamicMBean {
 
-  private static final Log LOG = LogFactory.getLog(MetricsSourceAdapter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MetricsSourceAdapter.class);
 
   private final String prefix, name;
   private final MetricsSource source;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
index e97e948..1166ff9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
@@ -36,8 +36,6 @@ import com.google.common.annotations.VisibleForTesting;
 import static com.google.common.base.Preconditions.*;
 
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.math3.util.ArithmeticUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
@@ -62,6 +60,8 @@ import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A base class for metrics system singletons
@@ -70,7 +70,7 @@ import org.apache.hadoop.util.Time;
 @Metrics(context="metricssystem")
 public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
 
-  static final Log LOG = LogFactory.getLog(MetricsSystemImpl.class);
+  static final Logger LOG = LoggerFactory.getLogger(MetricsSystemImpl.class);
   static final String MS_NAME = "MetricsSystem";
   static final String MS_STATS_NAME = MS_NAME +",sub=Stats";
   static final String MS_STATS_DESC = "Metrics system metrics";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
index 9ab9243..8f0442a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
@@ -22,20 +22,21 @@ import java.lang.reflect.Method;
 
 import static com.google.common.base.Preconditions.*;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.annotation.Metric;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.apache.hadoop.metrics2.util.Contracts.*;
 
 /**
  * Metric generated from a method, mostly used by annotation
  */
 class MethodMetric extends MutableMetric {
-  private static final Log LOG = LogFactory.getLog(MethodMetric.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MethodMetric.class);
 
   private final Object obj;
   private final Method method;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
index b44a471..7730cfd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
@@ -24,8 +24,6 @@ import java.lang.reflect.Method;
 
 import static com.google.common.base.Preconditions.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsException;
@@ -34,13 +32,16 @@ import org.apache.hadoop.metrics2.MetricsSource;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Helper class to build metrics source object from annotations
  */
 @InterfaceAudience.Private
 public class MetricsSourceBuilder {
-  private static final Log LOG = LogFactory.getLog(MetricsSourceBuilder.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MetricsSourceBuilder.class);
 
   private final Object source;
   private final MutableMetricsFactory factory;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
index 9e7810a..3209990 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
@@ -22,19 +22,20 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class MutableMetricsFactory {
-  private static final Log LOG = LogFactory.getLog(MutableMetricsFactory.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MutableMetricsFactory.class);
 
   MutableMetric newForField(Field field, Metric annotation,
                             MetricsRegistry registry) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
index 1074e87..994eb13 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
@@ -24,12 +24,11 @@ import java.util.Set;
 import static com.google.common.base.Preconditions.*;
 import com.google.common.collect.Sets;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Helper class to manage a group of mutable rate metrics
@@ -43,7 +42,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MutableRates extends MutableMetric {
-  static final Log LOG = LogFactory.getLog(MutableRates.class);
+  static final Logger LOG = LoggerFactory.getLogger(MutableRates.class);
   private final MetricsRegistry registry;
   private final Set<Class<?>> protocolCache = Sets.newHashSet();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
index 9827ca7..26a1506 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
@@ -27,12 +27,12 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentLinkedDeque;
 import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.util.SampleStat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -48,7 +48,8 @@ import org.apache.hadoop.metrics2.util.SampleStat;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MutableRatesWithAggregation extends MutableMetric {
-  static final Log LOG = LogFactory.getLog(MutableRatesWithAggregation.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(MutableRatesWithAggregation.class);
   private final Map<String, MutableRate> globalMetrics =
       new ConcurrentHashMap<>();
   private final Set<Class<?>> protocolCache = Sets.newHashSet();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index a61fa5b..6efd193 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.metrics2.sink;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -28,6 +26,8 @@ import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -42,7 +42,8 @@ import java.nio.charset.StandardCharsets;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class GraphiteSink implements MetricsSink, Closeable {
-    private static final Log LOG = LogFactory.getLog(GraphiteSink.class);
+    private static final Logger LOG =
+        LoggerFactory.getLogger(GraphiteSink.class);
     private static final String SERVER_HOST_KEY = "server_host";
     private static final String SERVER_PORT_KEY = "server_port";
     private static final String METRICS_PREFIX = "metrics_prefix";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
index 0afc9f2..2eb813a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
@@ -26,11 +26,11 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.util.Servers;
 import org.apache.hadoop.net.DNS;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This the base class for Ganglia sink classes using metrics2. Lot of the code
@@ -41,7 +41,7 @@ import org.apache.hadoop.net.DNS;
  */
 public abstract class AbstractGangliaSink implements MetricsSink {
 
-  public final Log LOG = LogFactory.getLog(this.getClass());
+  public final Logger LOG = LoggerFactory.getLogger(this.getClass());
 
   /*
    * Output of "gmetric --help" showing allowable values
@@ -126,7 +126,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
             conf.getString("dfs.datanode.dns.interface", "default"),
             conf.getString("dfs.datanode.dns.nameserver", "default"));
       } catch (UnknownHostException uhe) {
-        LOG.error(uhe);
+        LOG.error(uhe.toString());
         hostName = "UNKNOWN.example.com";
       }
     }
@@ -154,7 +154,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
         datagramSocket = new DatagramSocket();
       }
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString());
     }
 
     // see if sparseMetrics is supported. Default is false
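
The LOG.error(uhe) and LOG.error(e) calls above become LOG.error(...toString()) because slf4j has no error(Object) overload. A minimal sketch (hypothetical class); the two-argument form shown last is an alternative the commit does not use, but it also preserves the stack trace:

import java.net.UnknownHostException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of logging an exception with slf4j after the commons-logging migration.
public class ExceptionLoggingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(ExceptionLoggingSketch.class);

  public static void main(String[] args) {
    UnknownHostException uhe = new UnknownHostException("demo-host");
    LOG.error(uhe.toString());                      // pattern used in the diff
    LOG.error("Failed to resolve host name.", uhe); // message plus stack trace
  }
}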

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
index 37f91c9..c110252 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
@@ -27,8 +27,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
@@ -37,6 +35,8 @@ import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.MsInfo;
 import org.apache.hadoop.metrics2.util.MetricsCache;
 import org.apache.hadoop.metrics2.util.MetricsCache.Record;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This code supports Ganglia 3.0
@@ -44,7 +44,7 @@ import org.apache.hadoop.metrics2.util.MetricsCache.Record;
  */
 public class GangliaSink30 extends AbstractGangliaSink {
 
-  public final Log LOG = LogFactory.getLog(this.getClass());
+  public final Logger LOG = LoggerFactory.getLogger(this.getClass());
 
   private static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
index c8315e8..908a79d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.metrics2.sink.ganglia;
 
-import java.io.IOException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
 
 /**
  * This code supports Ganglia 3.1
@@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class GangliaSink31 extends GangliaSink30 {
 
-  public final Log LOG = LogFactory.getLog(this.getClass());    
+  public final Logger LOG = LoggerFactory.getLogger(this.getClass());
 
   /**
    * The method sends metrics to Ganglia servers. The method has been taken from

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
index 8b58ec0..06734c8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
@@ -25,11 +25,11 @@ import javax.management.InstanceAlreadyExistsException;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This util class provides a method to register an MBean using
@@ -39,7 +39,7 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class MBeans {
-  private static final Log LOG = LogFactory.getLog(MBeans.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MBeans.class);
   private static final String DOMAIN_PREFIX = "Hadoop:";
   private static final String SERVICE_PREFIX = "service=";
   private static final String NAME_PREFIX = "name=";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
index efcb286..e659846 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
@@ -23,8 +23,6 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -33,6 +31,8 @@ import org.apache.hadoop.metrics2.MetricsTag;
 
 import com.google.common.base.Objects;
 import com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A metrics cache for sinks that don't support sparse updates.
@@ -40,7 +40,7 @@ import com.google.common.collect.Maps;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MetricsCache {
-  static final Log LOG = LogFactory.getLog(MetricsCache.class);
+  static final Logger LOG = LoggerFactory.getLogger(MetricsCache.class);
   static final int MAX_RECS_PER_NAME_DEFAULT = 1000;
 
   private final Map<String, RecordCache> map = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
index a6dc8e3..81041c1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.net;
 
 import com.google.common.net.InetAddresses;
 import com.sun.istack.Nullable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.InetAddress;
 import java.net.NetworkInterface;
@@ -52,7 +52,7 @@ import javax.naming.directory.InitialDirContext;
 @InterfaceStability.Unstable
 public class DNS {
 
-  private static final Log LOG = LogFactory.getLog(DNS.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DNS.class);
 
   /**
    * The cached hostname -initially null.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index 4050107..8577336 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -44,8 +44,6 @@ import java.util.concurrent.ConcurrentHashMap;
 
 import javax.net.SocketFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.util.SubnetUtils;
 import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -58,11 +56,13 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class NetUtils {
-  private static final Log LOG = LogFactory.getLog(NetUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(NetUtils.class);
   
   private static Map<String, String> hostToResolved = 
                                      new HashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
index 3dcb610..02b44a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
@@ -21,13 +21,13 @@ package org.apache.hadoop.net;
 import java.util.*;
 import java.io.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class implements the {@link DNSToSwitchMapping} interface using a 
@@ -145,8 +145,8 @@ public class ScriptBasedMapping extends CachedDNSToSwitchMapping {
       extends AbstractDNSToSwitchMapping {
     private String scriptName;
     private int maxArgs; //max hostnames per call of the script
-    private static final Log LOG =
-        LogFactory.getLog(ScriptBasedMapping.class);
+    private static final Logger LOG =
+        LoggerFactory.getLogger(ScriptBasedMapping.class);
 
     /**
      * Set the configuration and extract the configuration parameters of interest

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
index b50f7e9..f489581 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
@@ -31,9 +31,9 @@ import java.nio.channels.spi.SelectorProvider;
 import java.util.Iterator;
 import java.util.LinkedList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This supports input and output streams for a socket channels. 
@@ -42,7 +42,7 @@ import org.apache.hadoop.util.Time;
 abstract class SocketIOWithTimeout {
   // This is intentionally package private.
 
-  static final Log LOG = LogFactory.getLog(SocketIOWithTimeout.class);    
+  static final Logger LOG = LoggerFactory.getLogger(SocketIOWithTimeout.class);
   
   private SelectableChannel channel;
   private long timeout;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
index 362cf07..ead9a74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
@@ -29,12 +29,12 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -56,7 +56,7 @@ import org.apache.hadoop.conf.Configured;
 @InterfaceStability.Evolving
 public class TableMapping extends CachedDNSToSwitchMapping {
 
-  private static final Log LOG = LogFactory.getLog(TableMapping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TableMapping.class);
   
   public TableMapping() {
     super(new RawTableMapping());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
index 8379fd1..ac118c0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
@@ -29,12 +29,12 @@ import java.nio.channels.ReadableByteChannel;
 import java.nio.ByteBuffer;
 
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.CloseableReferenceCount;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The implementation of UNIX domain sockets in Java.
@@ -60,7 +60,7 @@ public class DomainSocket implements Closeable {
     }
   }
 
-  static Log LOG = LogFactory.getLog(DomainSocket.class);
+  static final Logger LOG = LoggerFactory.getLogger(DomainSocket.class);
 
   /**
    * True only if we should validate the paths used in
@@ -459,13 +459,13 @@ public class DomainSocket implements Closeable {
             try {
               closeFileDescriptor0(descriptors[i]);
             } catch (Throwable t) {
-              LOG.warn(t);
+              LOG.warn(t.toString());
             }
           } else if (streams[i] != null) {
             try {
               streams[i].close();
             } catch (Throwable t) {
-              LOG.warn(t);
+              LOG.warn(t.toString());
             } finally {
               streams[i] = null; }
           }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java
index e1bcf7e..c7af97f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java
@@ -33,13 +33,13 @@ import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.Uninterruptibles;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The DomainSocketWatcher watches a set of domain sockets to see when they
@@ -68,7 +68,7 @@ public final class DomainSocketWatcher implements Closeable {
     }
   }
 
-  static Log LOG = LogFactory.getLog(DomainSocketWatcher.class);
+  static final Logger LOG = LoggerFactory.getLogger(DomainSocketWatcher.class);
 
   /**
    * The reason why DomainSocketWatcher is not available, or null if it is
@@ -306,7 +306,7 @@ public final class DomainSocketWatcher implements Closeable {
     try {
       if (closed) {
         handler.handle(sock);
-        IOUtils.cleanup(LOG, sock);
+        IOUtils.cleanupWithLogger(LOG, sock);
         return;
       }
       Entry entry = new Entry(sock, handler);
@@ -411,7 +411,7 @@ public final class DomainSocketWatcher implements Closeable {
             this + ": file descriptor " + sock.fd + " was closed while " +
             "still in the poll(2) loop.");
       }
-      IOUtils.cleanup(LOG, sock);
+      IOUtils.cleanupWithLogger(LOG, sock);
       fdSet.remove(fd);
       return true;
     } else {
@@ -524,7 +524,7 @@ public final class DomainSocketWatcher implements Closeable {
               Entry entry = iter.next();
               entry.getDomainSocket().refCount.unreference();
               entry.getHandler().handle(entry.getDomainSocket());
-              IOUtils.cleanup(LOG, entry.getDomainSocket());
+              IOUtils.cleanupWithLogger(LOG, entry.getDomainSocket());
               iter.remove();
             }
             // Items in toRemove might not be really removed, handle it here

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
index ffa7e2b..b8cfdf7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
@@ -25,13 +25,13 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An implementation of {@link GroupMappingServiceProvider} which
@@ -48,7 +48,8 @@ public class CompositeGroupsMapping
   public static final String MAPPING_PROVIDERS_COMBINED_CONFIG_KEY = MAPPING_PROVIDERS_CONFIG_KEY + ".combined";
   public static final String MAPPING_PROVIDER_CONFIG_PREFIX = GROUP_MAPPING_CONFIG_PREFIX + ".provider";
   
-  private static final Log LOG = LogFactory.getLog(CompositeGroupsMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CompositeGroupsMapping.class);
 
   private List<GroupMappingServiceProvider> providersList = 
 		  new ArrayList<GroupMappingServiceProvider>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index 465f4a8..4fdf11f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -34,8 +34,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -48,6 +46,8 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A class that provides the facilities of reading and writing
@@ -56,7 +56,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class Credentials implements Writable {
-  private static final Log LOG = LogFactory.getLog(Credentials.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Credentials.class);
 
   private  Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>();
   private  Map<Text, Token<? extends TokenIdentifier>> tokenMap = 
@@ -184,7 +184,7 @@ public class Credentials implements Writable {
     } catch(IOException ioe) {
       throw new IOException("Exception reading " + filename, ioe);
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
   }
 
@@ -207,7 +207,7 @@ public class Credentials implements Writable {
     } catch(IOException ioe) {
       throw new IOException("Exception reading " + filename, ioe);
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
index a8fa724..2eb7d6d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
@@ -59,9 +59,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Timer;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A user-to-groups mapping service.
@@ -74,7 +73,7 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class Groups {
-  private static final Log LOG = LogFactory.getLog(Groups.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Groups.class);
   
   private final GroupMappingServiceProvider impl;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java
index f9c1816..47b5a58 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java
@@ -21,20 +21,20 @@ package org.apache.hadoop.security;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
 import org.apache.hadoop.security.http.CrossOriginFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HttpCrossOriginFilterInitializer extends FilterInitializer {
 
   public static final String PREFIX = "hadoop.http.cross-origin.";
   public static final String ENABLED_SUFFIX = "enabled";
 
-  private static final Log LOG =
-      LogFactory.getLog(HttpCrossOriginFilterInitializer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HttpCrossOriginFilterInitializer.class);
 
   @Override
   public void initFilter(FilterContainer container, Configuration conf) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java
index d397e44..a0f6142 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java
@@ -25,9 +25,9 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A JNI-based implementation of {@link GroupMappingServiceProvider} 
@@ -38,8 +38,8 @@ import org.apache.hadoop.util.NativeCodeLoader;
 @InterfaceStability.Evolving
 public class JniBasedUnixGroupsMapping implements GroupMappingServiceProvider {
   
-  private static final Log LOG = 
-    LogFactory.getLog(JniBasedUnixGroupsMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JniBasedUnixGroupsMapping.class);
 
   static {
     if (!NativeCodeLoader.isNativeCodeLoaded()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
index 40333fc..f164430 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
@@ -21,16 +21,16 @@ package org.apache.hadoop.security;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.PerformanceAdvisory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class JniBasedUnixGroupsMappingWithFallback implements
     GroupMappingServiceProvider {
 
-  private static final Log LOG = LogFactory
-      .getLog(JniBasedUnixGroupsMappingWithFallback.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(JniBasedUnixGroupsMappingWithFallback.class);
   
   private GroupMappingServiceProvider impl;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java
index ff4ab98..86dabfb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java
@@ -26,11 +26,11 @@ import java.util.LinkedList;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import org.apache.hadoop.security.NetgroupCache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A JNI-based implementation of {@link GroupMappingServiceProvider} 
@@ -42,8 +42,8 @@ import org.apache.hadoop.security.NetgroupCache;
 public class JniBasedUnixGroupsNetgroupMapping
   extends JniBasedUnixGroupsMapping {
   
-  private static final Log LOG = LogFactory.getLog(
-    JniBasedUnixGroupsNetgroupMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JniBasedUnixGroupsNetgroupMapping.class);
 
   native String[] getUsersForNetgroupJNI(String group);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java
index 7d77c10..fcc47cb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java
@@ -21,15 +21,15 @@ package org.apache.hadoop.security;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class JniBasedUnixGroupsNetgroupMappingWithFallback implements
     GroupMappingServiceProvider {
 
-  private static final Log LOG = LogFactory
-      .getLog(JniBasedUnixGroupsNetgroupMappingWithFallback.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(JniBasedUnixGroupsNetgroupMappingWithFallback.class);
 
   private GroupMappingServiceProvider impl;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
index 1a184e8..babfa38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
@@ -41,12 +41,12 @@ import javax.naming.directory.SearchResult;
 import javax.naming.ldap.LdapName;
 import javax.naming.ldap.Rdn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An implementation of {@link GroupMappingServiceProvider} which
@@ -211,7 +211,8 @@ public class LdapGroupsMapping
       LDAP_CONFIG_PREFIX + ".read.timeout.ms";
   public static final int READ_TIMEOUT_DEFAULT = 60 * 1000; // 60 seconds
 
-  private static final Log LOG = LogFactory.getLog(LdapGroupsMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LdapGroupsMapping.class);
 
   static final SearchControls SEARCH_CONTROLS = new SearchControls();
   static {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index 013e56c..8dcf8b9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -26,14 +26,14 @@ import java.net.URL;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Utility methods for both key and credential provider APIs.
@@ -57,7 +57,8 @@ public final class ProviderUtils {
       "Please review the documentation regarding provider passwords in\n" +
       "the keystore passwords section of the Credential Provider API\n";
 
-  private static final Log LOG = LogFactory.getLog(ProviderUtils.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ProviderUtils.class);
 
   /**
    * Hidden ctor to ensure that this utility class isn't

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java
index a3d66b9..a91a90a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java
@@ -30,10 +30,10 @@ import javax.security.sasl.SaslClient;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A SaslInputStream is composed of an InputStream and a SaslServer (or
@@ -45,7 +45,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class SaslInputStream extends InputStream implements ReadableByteChannel {
-  public static final Log LOG = LogFactory.getLog(SaslInputStream.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SaslInputStream.class);
 
   private final DataInputStream inStream;
   /** Should we wrap the communication channel? */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
index bef877c..5b14ebe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -45,8 +45,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslClient;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -74,13 +72,16 @@ import org.apache.hadoop.util.ProtoUtil;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.ByteString;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * A utility class that encapsulates SASL logic for RPC client
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class SaslRpcClient {
-  public static final Log LOG = LogFactory.getLog(SaslRpcClient.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SaslRpcClient.class);
 
   private final UserGroupInformation ugi;
   private final Class<?> protocol;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
index 377a0f1..653bf07 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -45,8 +45,6 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -57,6 +55,8 @@ import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A utility class for dealing with SASL on RPC server
@@ -64,7 +64,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class SaslRpcServer {
-  public static final Log LOG = LogFactory.getLog(SaslRpcServer.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SaslRpcServer.class);
   public static final String SASL_DEFAULT_REALM = "default";
   private static SaslServerFactory saslFactory;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index b7d1ec0..20e8754 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -36,8 +36,6 @@ import javax.annotation.Nullable;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.kerberos.KerberosTicket;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -51,7 +49,8 @@ import org.apache.hadoop.security.token.TokenInfo;
 import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.StringUtils;
 
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 //this will need to be replaced someday when there is a suitable replacement
 import sun.net.dns.ResolverConfiguration;
 import sun.net.util.IPAddressUtil;
@@ -64,7 +63,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public final class SecurityUtil {
-  public static final Log LOG = LogFactory.getLog(SecurityUtil.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SecurityUtil.class);
   public static final String HOSTNAME_PATTERN = "_HOST";
   public static final String FAILED_TO_GET_UGI_MSG_HEADER = 
       "Failed to obtain user group information:";
@@ -473,7 +472,7 @@ public final class SecurityUtil {
       try { 
         ugi = UserGroupInformation.getLoginUser();
       } catch (IOException e) {
-        LOG.fatal("Exception while getting login user", e);
+        LOG.error("Exception while getting login user", e);
         e.printStackTrace();
         Runtime.getRuntime().exit(-1);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index fae556f..da930b4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -29,14 +29,14 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple shell-based implementation of {@link IdMappingServiceProvider} 
@@ -62,8 +62,8 @@ import com.google.common.collect.HashBiMap;
  */
 public class ShellBasedIdMapping implements IdMappingServiceProvider {
 
-  private static final Log LOG =
-      LogFactory.getLog(ShellBasedIdMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ShellBasedIdMapping.class);
 
   private final static String OS = System.getProperty("os.name");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
index 4aa4e9f..eff6985 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
@@ -23,12 +23,12 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 
 import org.apache.hadoop.security.NetgroupCache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple shell-based implementation of {@link GroupMappingServiceProvider} 
@@ -40,8 +40,8 @@ import org.apache.hadoop.security.NetgroupCache;
 public class ShellBasedUnixGroupsNetgroupMapping
   extends ShellBasedUnixGroupsMapping {
   
-  private static final Log LOG =
-    LogFactory.getLog(ShellBasedUnixGroupsNetgroupMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ShellBasedUnixGroupsNetgroupMapping.class);
 
   /**
    * Get unix groups (parent) and netgroups for given user

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
index 8d4df64..a64c4de 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
@@ -24,13 +24,13 @@ import java.util.TreeMap;
 
 import javax.security.sasl.Sasl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;
 import org.apache.hadoop.util.CombinedIPWhiteList;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -54,7 +54,8 @@ import org.apache.hadoop.util.StringUtils;
  *
  */
 public class WhitelistBasedResolver extends SaslPropertiesResolver {
-  public static final Log LOG = LogFactory.getLog(WhitelistBasedResolver.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(WhitelistBasedResolver.class);
 
   private static final String FIXEDWHITELIST_DEFAULT_LOCATION = "/etc/hadoop/fixedwhitelist";
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
index 8e4a0a5..df783f1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.security.alias;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -27,6 +25,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.ProviderUtils;
 
 import com.google.common.base.Charsets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.crypto.spec.SecretKeySpec;
 import java.io.IOException;
@@ -60,7 +60,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
  */
 @InterfaceAudience.Private
 public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
-  public static final Log LOG = LogFactory.getLog(
+  public static final Logger LOG = LoggerFactory.getLogger(
       AbstractJavaKeyStoreProvider.class);
   public static final String CREDENTIAL_PASSWORD_ENV_VAR =
       "HADOOP_CREDSTORE_PASSWORD";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 9da95dc..4c47348 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -23,8 +23,6 @@ import java.util.IdentityHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,6 +34,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.MachineList;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An authorization manager which handles service-level authorization
@@ -69,8 +69,9 @@ public class ServiceAuthorizationManager {
   public static final String SERVICE_AUTHORIZATION_CONFIG = 
     "hadoop.security.authorization";
   
-  public static final Log AUDITLOG =
-    LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName());
+  public static final Logger AUDITLOG =
+      LoggerFactory.getLogger(
+          "SecurityLogger." + ServiceAuthorizationManager.class.getName());
 
   private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for ";
   private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java
index ea78762..58d50cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java
@@ -35,14 +35,15 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CrossOriginFilter implements Filter {
 
-  private static final Log LOG = LogFactory.getLog(CrossOriginFilter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CrossOriginFilter.class);
 
   // HTTP CORS Request Headers
   static final String ORIGIN = "Origin";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index 4e59010..405059b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -18,12 +18,12 @@
 package org.apache.hadoop.security.ssl;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.ssl.KeyManager;
 import javax.net.ssl.KeyManagerFactory;
@@ -47,8 +47,8 @@ import java.text.MessageFormat;
 @InterfaceStability.Evolving
 public class FileBasedKeyStoresFactory implements KeyStoresFactory {
 
-  private static final Log LOG =
-    LogFactory.getLog(FileBasedKeyStoresFactory.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FileBasedKeyStoresFactory.class);
 
   public static final String SSL_KEYSTORE_LOCATION_TPL_KEY =
     "ssl.{0}.keystore.location";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
index 597f8d7..3be3635 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.security.ssl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.ssl.TrustManager;
 import javax.net.ssl.TrustManagerFactory;
@@ -47,7 +47,8 @@ public final class ReloadingX509TrustManager
   implements X509TrustManager, Runnable {
 
   @VisibleForTesting
-  static final Log LOG = LogFactory.getLog(ReloadingX509TrustManager.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(ReloadingX509TrustManager.class);
   @VisibleForTesting
   static final String RELOAD_ERROR_MESSAGE =
       "Could not load truststore (keep using existing one) : ";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
index 7ff2292..9bf009d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
@@ -23,10 +23,10 @@ import java.util.ArrayList;
 
 import javax.net.ssl.SSLEngine;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.mortbay.jetty.security.SslSelectChannelConnector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This subclass of the Jetty SslSelectChannelConnector exists solely to
@@ -36,8 +36,8 @@ import org.mortbay.jetty.security.SslSelectChannelConnector;
  */
 @InterfaceAudience.Private
 public class SslSelectChannelConnectorSecure extends SslSelectChannelConnector {
-  public static final Log LOG =
-      LogFactory.getLog(SslSelectChannelConnectorSecure.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SslSelectChannelConnectorSecure.class);
 
   public SslSelectChannelConnectorSecure() {
     super();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index ed699a7..5a15b94 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -22,13 +22,13 @@ import com.google.common.collect.Maps;
 import com.google.common.primitives.Bytes;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.*;
 import java.util.Arrays;
@@ -42,7 +42,7 @@ import java.util.UUID;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class Token<T extends TokenIdentifier> implements Writable {
-  public static final Log LOG = LogFactory.getLog(Token.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Token.class);
   
   private static Map<Text, Class<? extends TokenIdentifier>> tokenKindMap;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
index cf88745..f06681b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
@@ -30,8 +30,6 @@ import java.util.Set;
 
 import javax.crypto.SecretKey;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
@@ -43,6 +41,8 @@ import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
@@ -50,8 +50,8 @@ public abstract
 class AbstractDelegationTokenSecretManager<TokenIdent 
 extends AbstractDelegationTokenIdentifier> 
    extends SecretManager<TokenIdent> {
-  private static final Log LOG = LogFactory
-      .getLog(AbstractDelegationTokenSecretManager.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(AbstractDelegationTokenSecretManager.class);
 
   private String formatTokenId(TokenIdent id) {
     return "(" + id + ")";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java
index 1327683..2a1140f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java
@@ -25,13 +25,13 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is the base implementation class for services.
@@ -40,7 +40,8 @@ import com.google.common.annotations.VisibleForTesting;
 @Evolving
 public abstract class AbstractService implements Service {
 
-  private static final Log LOG = LogFactory.getLog(AbstractService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AbstractService.class);
 
   /**
    * Service name.
@@ -258,7 +259,7 @@ public abstract class AbstractService implements Service {
    */
   protected final void noteFailure(Exception exception) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("noteFailure " + exception, null);
+      LOG.debug("noteFailure " + exception, (Throwable) null);
     }
     if (exception == null) {
       //make sure failure logic doesn't itself cause problems

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
index 51cb4a3..a5e8c89 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.service;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Composition of services.
@@ -34,7 +34,8 @@ import org.apache.hadoop.conf.Configuration;
 @Evolving
 public class CompositeService extends AbstractService {
 
-  private static final Log LOG = LogFactory.getLog(CompositeService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CompositeService.class);
 
   /**
    * Policy on shutdown: attempt to close everything (purest) or




[6/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Posted by aa...@apache.org.
HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

This closes #251

Signed-off-by: Akira Ajisaka <aa...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7e583a38
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7e583a38
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7e583a38

Branch: refs/heads/branch-2
Commit: 7e583a38247c433638e4430a1620f2ee58bcbc97
Parents: c12bf9a
Author: wenxinhe <we...@gmail.com>
Authored: Tue Jul 18 19:06:25 2017 +0800
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Mon Jul 24 11:14:08 2017 +0900

----------------------------------------------------------------------
 .../org/apache/hadoop/conf/Configuration.java   | 16 ++++++++--------
 .../apache/hadoop/conf/ReconfigurableBase.java  |  7 ++++---
 .../hadoop/conf/ReconfigurationServlet.java     |  8 ++++----
 .../hadoop/crypto/JceAesCtrCryptoCodec.java     |  8 ++++----
 .../hadoop/crypto/OpensslAesCtrCryptoCodec.java |  8 ++++----
 .../org/apache/hadoop/crypto/OpensslCipher.java |  8 ++++----
 .../crypto/random/OpensslSecureRandom.java      |  8 ++++----
 .../hadoop/crypto/random/OsSecureRandom.java    |  9 +++++----
 .../apache/hadoop/fs/AbstractFileSystem.java    |  6 +++---
 .../java/org/apache/hadoop/fs/ChecksumFs.java   |  8 ++++----
 .../hadoop/fs/DelegationTokenRenewer.java       | 10 +++++-----
 .../org/apache/hadoop/fs/FSInputChecker.java    |  9 +++++----
 .../java/org/apache/hadoop/fs/FileContext.java  | 10 +++++-----
 .../java/org/apache/hadoop/fs/FileUtil.java     | 10 +++++-----
 .../main/java/org/apache/hadoop/fs/FsShell.java |  6 +++---
 .../apache/hadoop/fs/FsShellPermissions.java    |  4 ++--
 .../main/java/org/apache/hadoop/fs/Globber.java |  7 ++++---
 .../org/apache/hadoop/fs/HarFileSystem.java     | 11 ++++++-----
 .../org/apache/hadoop/fs/LocalDirAllocator.java |  9 +++++----
 .../main/java/org/apache/hadoop/fs/Trash.java   |  6 +++---
 .../apache/hadoop/fs/TrashPolicyDefault.java    |  8 ++++----
 .../org/apache/hadoop/fs/ftp/FTPFileSystem.java |  8 ++++----
 .../hadoop/fs/permission/FsPermission.java      |  6 +++---
 .../hadoop/fs/sftp/SFTPConnectionPool.java      |  7 ++++---
 .../apache/hadoop/fs/sftp/SFTPFileSystem.java   |  7 ++++---
 .../org/apache/hadoop/fs/shell/Command.java     |  6 +++---
 .../apache/hadoop/ha/ActiveStandbyElector.java  | 15 ++++++++-------
 .../apache/hadoop/ha/FailoverController.java    | 10 +++++-----
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  8 ++++----
 .../org/apache/hadoop/ha/HealthMonitor.java     |  8 ++++----
 .../java/org/apache/hadoop/ha/NodeFencer.java   |  6 +++---
 .../org/apache/hadoop/ha/SshFenceByTcpPort.java | 11 +++--------
 .../apache/hadoop/ha/ZKFailoverController.java  | 20 ++++++++++----------
 ...HAServiceProtocolServerSideTranslatorPB.java |  6 +++---
 .../java/org/apache/hadoop/http/HttpServer.java |  6 +++---
 .../org/apache/hadoop/http/HttpServer2.java     |  7 ++++---
 .../hadoop/http/lib/StaticUserWebFilter.java    |  7 ++++---
 .../java/org/apache/hadoop/io/BloomMapFile.java |  6 +++---
 .../apache/hadoop/io/FastByteComparisons.java   |  7 +++----
 .../main/java/org/apache/hadoop/io/IOUtils.java |  4 ++--
 .../main/java/org/apache/hadoop/io/MapFile.java |  8 ++++----
 .../org/apache/hadoop/io/ReadaheadPool.java     |  6 +++---
 .../java/org/apache/hadoop/io/SequenceFile.java |  7 ++++---
 .../main/java/org/apache/hadoop/io/UTF8.java    |  5 +++--
 .../apache/hadoop/io/compress/CodecPool.java    |  6 +++---
 .../io/compress/CompressionCodecFactory.java    |  8 ++++----
 .../apache/hadoop/io/compress/DefaultCodec.java |  6 +++---
 .../io/compress/bzip2/Bzip2Compressor.java      |  8 ++++----
 .../io/compress/bzip2/Bzip2Decompressor.java    |  8 ++++----
 .../hadoop/io/compress/bzip2/Bzip2Factory.java  |  6 +++---
 .../hadoop/io/compress/lz4/Lz4Compressor.java   |  8 ++++----
 .../hadoop/io/compress/lz4/Lz4Decompressor.java |  8 ++++----
 .../io/compress/snappy/SnappyCompressor.java    |  8 ++++----
 .../io/compress/snappy/SnappyDecompressor.java  |  8 ++++----
 .../io/compress/zlib/BuiltInZlibDeflater.java   |  8 ++++----
 .../hadoop/io/compress/zlib/ZlibCompressor.java |  8 ++++----
 .../hadoop/io/compress/zlib/ZlibFactory.java    |  7 +++----
 .../org/apache/hadoop/io/file/tfile/BCFile.java |  6 +++---
 .../hadoop/io/file/tfile/Compression.java       |  6 +++---
 .../org/apache/hadoop/io/file/tfile/TFile.java  |  8 ++++----
 .../hadoop/io/file/tfile/TFileDumper.java       |  8 ++++----
 .../org/apache/hadoop/io/nativeio/NativeIO.java | 16 ++++++++--------
 .../nativeio/SharedFileDescriptorFactory.java   |  7 ++++---
 .../apache/hadoop/io/retry/RetryPolicies.java   |  6 +++---
 .../org/apache/hadoop/io/retry/RetryUtils.java  |  6 +++---
 .../io/serializer/SerializationFactory.java     |  8 ++++----
 .../org/apache/hadoop/ipc/CallQueueManager.java |  7 ++++---
 .../main/java/org/apache/hadoop/ipc/Client.java |  6 +++---
 .../org/apache/hadoop/ipc/FairCallQueue.java    |  6 +++---
 .../apache/hadoop/ipc/ProtobufRpcEngine.java    |  7 ++++---
 .../main/java/org/apache/hadoop/ipc/RPC.java    |  6 +++---
 .../org/apache/hadoop/ipc/RefreshRegistry.java  |  7 ++++---
 .../java/org/apache/hadoop/ipc/RetryCache.java  |  6 +++---
 .../main/java/org/apache/hadoop/ipc/Server.java | 16 ++++++++--------
 .../ipc/WeightedRoundRobinMultiplexer.java      |  8 ++++----
 .../apache/hadoop/ipc/WritableRpcEngine.java    |  6 +++---
 .../hadoop/ipc/metrics/RetryCacheMetrics.java   |  6 +++---
 .../hadoop/ipc/metrics/RpcDetailedMetrics.java  |  8 ++++----
 .../apache/hadoop/ipc/metrics/RpcMetrics.java   |  6 +++---
 .../org/apache/hadoop/jmx/JMXJsonServlet.java   |  7 ++++---
 .../org/apache/hadoop/metrics/MetricsUtil.java  |  7 +++----
 .../hadoop/metrics/ganglia/GangliaContext.java  |  8 ++++----
 .../metrics/ganglia/GangliaContext31.java       | 10 +++++-----
 .../apache/hadoop/metrics/jvm/JvmMetrics.java   |  6 +++---
 .../hadoop/metrics/spi/CompositeContext.java    |  8 ++++----
 .../hadoop/metrics/util/MetricsIntValue.java    |  8 ++++----
 .../metrics/util/MetricsTimeVaryingInt.java     |  8 ++++----
 .../metrics/util/MetricsTimeVaryingLong.java    |  8 ++++----
 .../metrics/util/MetricsTimeVaryingRate.java    |  8 ++++----
 .../hadoop/metrics2/impl/MBeanInfoBuilder.java  |  2 +-
 .../hadoop/metrics2/impl/MetricsConfig.java     |  8 ++++----
 .../metrics2/impl/MetricsSinkAdapter.java       |  9 +++++----
 .../metrics2/impl/MetricsSourceAdapter.java     |  7 ++++---
 .../hadoop/metrics2/impl/MetricsSystemImpl.java |  6 +++---
 .../hadoop/metrics2/lib/MethodMetric.java       |  7 ++++---
 .../metrics2/lib/MetricsSourceBuilder.java      |  7 ++++---
 .../metrics2/lib/MutableMetricsFactory.java     |  7 ++++---
 .../hadoop/metrics2/lib/MutableRates.java       |  7 +++----
 .../lib/MutableRatesWithAggregation.java        |  7 ++++---
 .../hadoop/metrics2/sink/GraphiteSink.java      |  7 ++++---
 .../sink/ganglia/AbstractGangliaSink.java       | 10 +++++-----
 .../metrics2/sink/ganglia/GangliaSink30.java    |  6 +++---
 .../metrics2/sink/ganglia/GangliaSink31.java    |  8 ++++----
 .../org/apache/hadoop/metrics2/util/MBeans.java |  6 +++---
 .../hadoop/metrics2/util/MetricsCache.java      |  6 +++---
 .../main/java/org/apache/hadoop/net/DNS.java    |  6 +++---
 .../java/org/apache/hadoop/net/NetUtils.java    |  6 +++---
 .../apache/hadoop/net/ScriptBasedMapping.java   |  8 ++++----
 .../apache/hadoop/net/SocketIOWithTimeout.java  |  6 +++---
 .../org/apache/hadoop/net/TableMapping.java     |  6 +++---
 .../apache/hadoop/net/unix/DomainSocket.java    | 10 +++++-----
 .../hadoop/net/unix/DomainSocketWatcher.java    | 12 ++++++------
 .../hadoop/security/CompositeGroupsMapping.java |  7 ++++---
 .../org/apache/hadoop/security/Credentials.java | 10 +++++-----
 .../java/org/apache/hadoop/security/Groups.java |  7 +++----
 .../HttpCrossOriginFilterInitializer.java       |  8 ++++----
 .../security/JniBasedUnixGroupsMapping.java     |  8 ++++----
 .../JniBasedUnixGroupsMappingWithFallback.java  |  8 ++++----
 .../JniBasedUnixGroupsNetgroupMapping.java      |  8 ++++----
 ...edUnixGroupsNetgroupMappingWithFallback.java |  8 ++++----
 .../hadoop/security/LdapGroupsMapping.java      |  7 ++++---
 .../apache/hadoop/security/ProviderUtils.java   |  7 ++++---
 .../apache/hadoop/security/SaslInputStream.java |  7 ++++---
 .../apache/hadoop/security/SaslRpcClient.java   |  7 ++++---
 .../apache/hadoop/security/SaslRpcServer.java   |  6 +++---
 .../apache/hadoop/security/SecurityUtil.java    |  9 ++++-----
 .../hadoop/security/ShellBasedIdMapping.java    |  8 ++++----
 .../ShellBasedUnixGroupsNetgroupMapping.java    |  8 ++++----
 .../hadoop/security/WhitelistBasedResolver.java |  7 ++++---
 .../alias/AbstractJavaKeyStoreProvider.java     |  6 +++---
 .../authorize/ServiceAuthorizationManager.java  |  9 +++++----
 .../hadoop/security/http/CrossOriginFilter.java |  7 ++++---
 .../security/ssl/FileBasedKeyStoresFactory.java |  8 ++++----
 .../security/ssl/ReloadingX509TrustManager.java |  7 ++++---
 .../ssl/SslSelectChannelConnectorSecure.java    |  8 ++++----
 .../org/apache/hadoop/security/token/Token.java |  6 +++---
 .../AbstractDelegationTokenSecretManager.java   |  8 ++++----
 .../apache/hadoop/service/AbstractService.java  |  9 +++++----
 .../apache/hadoop/service/CompositeService.java |  7 ++++---
 .../service/LoggingStateChangeListener.java     | 11 ++++++-----
 .../hadoop/service/ServiceOperations.java       |  5 +++--
 .../tracing/TracerConfigurationManager.java     |  8 ++++----
 .../hadoop/util/ApplicationClassLoader.java     | 10 +++++-----
 .../apache/hadoop/util/AsyncDiskService.java    |  7 ++++---
 .../apache/hadoop/util/CombinedIPWhiteList.java |  7 ++++---
 .../org/apache/hadoop/util/FileBasedIPList.java | 11 ++++++-----
 .../main/java/org/apache/hadoop/util/GSet.java  |  6 +++---
 .../hadoop/util/GenericOptionsParser.java       |  7 ++++---
 .../org/apache/hadoop/util/HostsFileReader.java |  9 +++++----
 .../apache/hadoop/util/IntrusiveCollection.java |  7 ++++---
 .../org/apache/hadoop/util/JvmPauseMonitor.java |  6 +++---
 .../org/apache/hadoop/util/MachineList.java     |  6 +++---
 .../apache/hadoop/util/NativeCodeLoader.java    |  8 ++++----
 .../hadoop/util/NodeHealthScriptRunner.java     |  7 ++++---
 .../java/org/apache/hadoop/util/Progress.java   |  6 +++---
 .../apache/hadoop/util/ShutdownHookManager.java |  7 ++++---
 .../hadoop/util/ShutdownThreadsHelper.java      |  7 ++++---
 .../org/apache/hadoop/util/SysInfoLinux.java    |  7 +++----
 .../org/apache/hadoop/util/SysInfoWindows.java  |  7 ++++---
 .../java/org/apache/hadoop/util/ThreadUtil.java |  7 +++----
 .../org/apache/hadoop/util/VersionInfo.java     |  8 ++++----
 .../hadoop/util/concurrent/AsyncGetFuture.java  |  7 ++++---
 .../hadoop/util/concurrent/ExecutorHelper.java  |  8 ++++----
 .../HadoopScheduledThreadPoolExecutor.java      |  8 ++++----
 .../concurrent/HadoopThreadPoolExecutor.java    |  8 ++++----
 .../org/apache/hadoop/cli/CLITestHelper.java    |  9 +++++----
 .../hadoop/crypto/CryptoStreamsTestBase.java    |  6 +++---
 .../apache/hadoop/crypto/TestCryptoCodec.java   |  7 ++++---
 .../apache/hadoop/fs/FCStatisticsBaseTest.java  |  7 ++++---
 .../org/apache/hadoop/fs/TestFileContext.java   |  7 ++++---
 .../org/apache/hadoop/fs/TestFileStatus.java    |  8 ++++----
 .../java/org/apache/hadoop/fs/TestFileUtil.java |  6 +++---
 .../org/apache/hadoop/fs/TestFsShellCopy.java   |  6 +++---
 .../apache/hadoop/fs/TestFsShellReturnCode.java |  8 ++++----
 .../org/apache/hadoop/fs/TestFsShellTouch.java  |  6 +++---
 .../org/apache/hadoop/fs/TestHarFileSystem.java |  7 ++++---
 .../fs/contract/AbstractBondedFSContract.java   |  8 ++++----
 .../hadoop/fs/loadGenerator/LoadGenerator.java  | 10 +++++-----
 .../hadoop/ha/ActiveStandbyElectorTestUtil.java |  6 +++---
 .../org/apache/hadoop/ha/DummyHAService.java    |  7 ++++---
 .../org/apache/hadoop/ha/MiniZKFCCluster.java   |  7 ++++---
 .../java/org/apache/hadoop/ha/TestHAAdmin.java  |  6 +++---
 .../org/apache/hadoop/ha/TestHealthMonitor.java |  6 +++---
 .../apache/hadoop/http/TestGlobalFilter.java    |  6 +++---
 .../org/apache/hadoop/http/TestHttpServer.java  |  6 +++---
 .../apache/hadoop/http/TestHttpServerLogs.java  |  6 +++---
 .../hadoop/http/TestHttpServerWebapps.java      |  9 +++++----
 .../hadoop/http/TestHttpServerWithSpengo.java   |  7 ++++---
 .../org/apache/hadoop/http/TestPathFilter.java  |  6 +++---
 .../apache/hadoop/http/TestSSLHttpServer.java   |  7 ++++---
 .../apache/hadoop/http/TestServletFilter.java   |  6 +++---
 .../hadoop/http/resource/JerseyResource.java    |  6 +++---
 .../org/apache/hadoop/io/TestArrayFile.java     |  7 +++++--
 .../hadoop/io/TestDefaultStringifier.java       |  8 +++++---
 .../org/apache/hadoop/io/TestSequenceFile.java  |  7 ++++---
 .../java/org/apache/hadoop/io/TestSetFile.java  |  7 +++----
 .../org/apache/hadoop/io/TestWritableUtils.java |  7 ++++---
 .../apache/hadoop/io/compress/TestCodec.java    |  9 +++++----
 .../io/compress/TestCompressionStreamReuse.java |  9 +++++----
 .../apache/hadoop/io/nativeio/TestNativeIO.java | 10 +++++-----
 .../TestSharedFileDescriptorFactory.java        |  7 ++++---
 .../org/apache/hadoop/ipc/TestAsyncIPC.java     | 10 +++++-----
 .../java/org/apache/hadoop/ipc/TestIPC.java     | 17 ++++++++---------
 .../hadoop/ipc/TestIPCServerResponder.java      | 10 +++++-----
 .../ipc/TestProtoBufRpcServerHandoff.java       | 12 ++++++------
 .../java/org/apache/hadoop/ipc/TestRPC.java     |  6 +++---
 .../apache/hadoop/ipc/TestRPCCompatibility.java |  8 ++++----
 .../hadoop/ipc/TestRPCServerShutdown.java       |  7 ++++---
 .../apache/hadoop/ipc/TestRpcServerHandoff.java |  8 ++++----
 .../java/org/apache/hadoop/ipc/TestSaslRPC.java |  7 +++----
 .../java/org/apache/hadoop/ipc/TestServer.java  |  8 ++++----
 .../ipc/TestWeightedRoundRobinMultiplexer.java  |  7 ++++---
 .../metrics2/impl/TestGangliaMetrics.java       |  7 ++++---
 .../hadoop/metrics2/impl/TestMetricsConfig.java |  7 ++++---
 .../metrics2/impl/TestMetricsSystemImpl.java    | 11 ++++++-----
 .../hadoop/metrics2/impl/TestSinkQueue.java     | 10 ++++++----
 .../hadoop/metrics2/lib/TestMutableMetrics.java |  7 ++++---
 .../hadoop/metrics2/util/TestMetricsCache.java  |  8 +++++---
 .../org/apache/hadoop/net/ServerSocketUtil.java |  9 +++++----
 .../java/org/apache/hadoop/net/TestDNS.java     |  6 +++---
 .../org/apache/hadoop/net/TestNetUtils.java     |  6 +++---
 .../hadoop/net/TestSocketIOWithTimeout.java     |  8 +++++---
 .../apache/hadoop/net/TestStaticMapping.java    |  7 ++++---
 .../hadoop/net/unix/TestDomainSocket.java       |  2 +-
 .../net/unix/TestDomainSocketWatcher.java       | 15 ++++++++-------
 .../security/TestCompositeGroupMapping.java     |  7 ++++---
 .../hadoop/security/TestDoAsEffectiveUser.java  |  7 ++++---
 .../hadoop/security/TestGroupFallback.java      |  7 ++++---
 .../hadoop/security/TestGroupsCaching.java      |  8 ++++----
 .../TestShellBasedUnixGroupsMapping.java        |  8 ++++----
 .../alias/TestCredentialProviderFactory.java    |  7 ++++---
 .../authorize/TestAccessControlList.java        |  8 ++++----
 .../security/authorize/TestProxyUsers.java      |  8 ++++----
 .../token/delegation/TestDelegationToken.java   |  7 ++++---
 .../hadoop/service/TestCompositeService.java    |  7 ++++---
 .../hadoop/service/TestServiceLifecycle.java    |  7 ++++---
 .../org/apache/hadoop/test/MetricsAsserts.java  |  6 +++---
 .../hadoop/test/MultithreadedTestUtil.java      |  8 ++++----
 .../org/apache/hadoop/test/TestJUnitSetup.java  |  7 ++++---
 .../hadoop/util/TestAsyncDiskService.java       |  7 ++++---
 .../org/apache/hadoop/util/TestClasspath.java   |  9 +++++----
 .../hadoop/util/TestIdentityHashStore.java      |  7 ++++---
 .../apache/hadoop/util/TestLightWeightGSet.java |  7 ++++---
 .../util/TestLightWeightResizableGSet.java      |  7 ++++---
 .../hadoop/util/TestNativeCodeLoader.java       |  6 +++---
 .../apache/hadoop/util/TestSignalLogger.java    | 11 ++++++-----
 .../org/apache/hadoop/util/TestWinUtils.java    |  6 +++---
 .../org/apache/hadoop/mount/MountdBase.java     | 12 ++++++------
 .../java/org/apache/hadoop/nfs/NfsExports.java  |  6 +++---
 .../org/apache/hadoop/nfs/nfs3/FileHandle.java  |  6 +++---
 .../org/apache/hadoop/nfs/nfs3/Nfs3Base.java    | 10 +++++-----
 .../hadoop/oncrpc/RegistrationClient.java       |  7 ++++---
 .../java/org/apache/hadoop/oncrpc/RpcCall.java  |  8 ++++----
 .../org/apache/hadoop/oncrpc/RpcProgram.java    |  6 +++---
 .../java/org/apache/hadoop/oncrpc/RpcUtil.java  | 11 ++++++-----
 .../hadoop/oncrpc/SimpleTcpClientHandler.java   |  7 ++++---
 .../apache/hadoop/oncrpc/SimpleTcpServer.java   |  7 ++++---
 .../apache/hadoop/oncrpc/SimpleUdpServer.java   |  7 ++++---
 .../hadoop/oncrpc/security/Credentials.java     |  6 +++---
 .../hadoop/oncrpc/security/SecurityHandler.java |  7 ++++---
 .../java/org/apache/hadoop/portmap/Portmap.java |  8 ++++----
 .../hadoop/portmap/RpcProgramPortmap.java       |  7 ++++---
 262 files changed, 1044 insertions(+), 955 deletions(-)
----------------------------------------------------------------------
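
The same mechanical replacement recurs across the 262 files listed above: the commons-logging Log/LogFactory pair is swapped for slf4j's Logger/LoggerFactory. A minimal before/after sketch (the class name is hypothetical):

// Before (commons-logging):
//   import org.apache.commons.logging.Log;
//   import org.apache.commons.logging.LogFactory;
//
//   private static final Log LOG = LogFactory.getLog(Example.class);

// After (slf4j):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  private static final Logger LOG = LoggerFactory.getLogger(Example.class);
}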


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 253c47a..ceef588 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -77,8 +77,6 @@ import javax.xml.transform.stream.StreamResult;
 
 import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -98,6 +96,8 @@ import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.stax2.XMLInputFactory2;
 import org.codehaus.stax2.XMLStreamReader2;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
@@ -183,11 +183,11 @@ import com.google.common.base.Preconditions;
 @InterfaceStability.Stable
 public class Configuration implements Iterable<Map.Entry<String,String>>,
                                       Writable {
-  private static final Log LOG =
-    LogFactory.getLog(Configuration.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Configuration.class);
 
-  private static final Log LOG_DEPRECATION =
-    LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation");
+  private static final Logger LOG_DEPRECATION = LoggerFactory.getLogger(
+          "org.apache.hadoop.conf.Configuration.deprecation");
 
   private boolean quietmode = true;
 
@@ -2801,10 +2801,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       }
       return null;
     } catch (IOException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (XMLStreamException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     }
   }
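
slf4j's Logger has no FATAL level (its severities stop at ERROR), which is why the LOG.fatal calls above become LOG.error. A small sketch of the resulting call shape (class and method names are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LevelMappingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(LevelMappingSketch.class);

  void report(String name, Exception e) {
    // commons-logging offered fatal(...); slf4j tops out at error(...),
    // so former fatal-level messages are logged at error level.
    LOG.error("error parsing conf " + name, e);
  }
}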

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index bdd006d..146c6d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -22,9 +22,10 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -41,8 +42,8 @@ import java.util.Map;
 public abstract class ReconfigurableBase 
   extends Configured implements Reconfigurable {
   
-  private static final Log LOG =
-    LogFactory.getLog(ReconfigurableBase.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurableBase.class);
   // Use for testing purpose.
   private ReconfigurationUtil reconfigurationUtil = new ReconfigurationUtil();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index bb221ee..5a616f7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.conf;
 
-import org.apache.commons.logging.*;
-
 import org.apache.commons.lang.StringEscapeUtils;
 
 import java.util.Collection;
@@ -33,6 +31,8 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A servlet for changing a node's configuration.
@@ -45,8 +45,8 @@ public class ReconfigurationServlet extends HttpServlet {
   
   private static final long serialVersionUID = 1L;
 
-  private static final Log LOG =
-    LogFactory.getLog(ReconfigurationServlet.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurationServlet.class);
 
   // the prefix used to fing the attribute holding the reconfigurable 
   // for a given request

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
index 61ee743..de0e5dd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
@@ -26,12 +26,12 @@ import javax.crypto.Cipher;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;
@@ -42,8 +42,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(JceAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JceAesCtrCryptoCodec.class.getName());
   
   private Configuration conf;
   private String provider;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
index d08e588..8d01f42 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
@@ -26,22 +26,22 @@ import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.crypto.random.OsSecureRandom;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implement the AES-CTR crypto codec using JNI into OpenSSL.
  */
 @InterfaceAudience.Private
 public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslAesCtrCryptoCodec.class.getName());
 
   private Configuration conf;
   private Random random;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 264652b..286f9ac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -26,12 +26,12 @@ import javax.crypto.IllegalBlockSizeException;
 import javax.crypto.NoSuchPaddingException;
 import javax.crypto.ShortBufferException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OpenSSL cipher using JNI.
@@ -40,8 +40,8 @@ import com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Private
 public final class OpensslCipher {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslCipher.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslCipher.class.getName());
   public static final int ENCRYPT_MODE = 1;
   public static final int DECRYPT_MODE = 0;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
index b1fa988..519595a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.crypto.random;
 
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OpenSSL secure random using JNI.
@@ -43,8 +43,8 @@ import com.google.common.base.Preconditions;
 @InterfaceAudience.Private
 public class OpensslSecureRandom extends Random {
   private static final long serialVersionUID = -7828193502768789584L;
-  private static final Log LOG =
-      LogFactory.getLog(OpensslSecureRandom.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslSecureRandom.class.getName());
   
   /** If native SecureRandom unavailable, use java SecureRandom */
   private java.security.SecureRandom fallback = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
index 9428b98..6671591 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT;
@@ -39,7 +39,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 public class OsSecureRandom extends Random implements Closeable, Configurable {
-  public static final Log LOG = LogFactory.getLog(OsSecureRandom.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(OsSecureRandom.class);
   
   private static final long serialVersionUID = 6391500337172057900L;
 
@@ -112,7 +113,7 @@ public class OsSecureRandom extends Random implements Closeable, Configurable {
   @Override
   synchronized public void close() {
     if (stream != null) {
-      IOUtils.cleanup(LOG, stream);
+      IOUtils.cleanupWithLogger(LOG, stream);
       stream = null;
     }
   }
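
The existing IOUtils.cleanup overload takes a commons-logging Log, so call sites that now hold an slf4j LOG move to IOUtils.cleanupWithLogger, which accepts an org.slf4j.Logger for the same quiet-close behaviour. A small usage sketch (class and method names are illustrative):

import java.io.FileInputStream;

import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CleanupSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupSketch.class);

  void closeQuietly(FileInputStream stream) {
    // Closes the stream; any exception raised by close() is logged through
    // the supplied slf4j Logger rather than propagated to the caller.
    IOUtils.cleanupWithLogger(LOG, stream);
  }
}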

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index ef68437..9bea8f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -32,8 +32,6 @@ import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -52,6 +50,8 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class provides an interface for implementors of a Hadoop file system
@@ -66,7 +66,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class AbstractFileSystem {
-  static final Log LOG = LogFactory.getLog(AbstractFileSystem.class);
+  static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class);
 
   /** Recording statistics per a file system class. */
   private static final Map<URI, Statistics> 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 384b32c..b536b27 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -27,14 +27,14 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Abstract Checksumed Fs.
@@ -110,8 +110,8 @@ public abstract class ChecksumFs extends FilterFs {
    * It verifies that data matches checksums.
    *******************************************************/
   private static class ChecksumFSInputChecker extends FSInputChecker {
-    public static final Log LOG 
-      = LogFactory.getLog(FSInputChecker.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(FSInputChecker.class);
     private static final int HEADER_LENGTH = 8;
     
     private ChecksumFs fs;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 3542a9b..09c3a8a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -26,12 +26,12 @@ import java.util.concurrent.DelayQueue;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A daemon thread that waits for the next file system to renew.
@@ -39,8 +39,8 @@ import org.apache.hadoop.util.Time;
 @InterfaceAudience.Private
 public class DelegationTokenRenewer
     extends Thread {
-  private static final Log LOG = LogFactory
-      .getLog(DelegationTokenRenewer.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DelegationTokenRenewer.class);
 
   /** The renewable interface used by the renewer. */
   public interface Renewable {
@@ -243,7 +243,7 @@ public class DelegationTokenRenewer
         LOG.error("Interrupted while canceling token for " + fs.getUri()
             + "filesystem");
         if (LOG.isDebugEnabled()) {
-          LOG.debug(ie.getStackTrace());
+          LOG.debug("Exception in removeRenewAction: ", ie);
         }
       }
     }
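
commons-logging's debug(Object) accepted the StackTraceElement[] returned by getStackTrace(), but that typically rendered only the array's toString(); slf4j's debug takes a String message, and the replacement above passes the exception itself so the full stack trace is written when debug logging is enabled. A minimal sketch (class and method names are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DebugThrowableSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(DebugThrowableSketch.class);

  void handle(InterruptedException ie) {
    if (LOG.isDebugEnabled()) {
      // Passing the exception as the final argument logs its full stack trace.
      LOG.debug("Exception in removeRenewAction: ", ie);
    }
  }
}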

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index 8505679..5f8291d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -22,11 +22,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.zip.Checksum;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.nio.ByteBuffer;
 import java.nio.IntBuffer;
 
@@ -37,8 +38,8 @@ import java.nio.IntBuffer;
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 abstract public class FSInputChecker extends FSInputStream {
-  public static final Log LOG 
-  = LogFactory.getLog(FSInputChecker.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(FSInputChecker.class);
   
   /** The file name from which data is read from */
   protected Path file;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index b2f0f47..341d11f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -35,8 +35,6 @@ import java.util.Stack;
 import java.util.TreeSet;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -62,6 +60,8 @@ import org.apache.hadoop.util.ShutdownHookManager;
 
 import com.google.common.base.Preconditions;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The FileContext class provides an interface to the application writer for
@@ -178,7 +178,7 @@ import org.apache.htrace.core.Tracer;
 @InterfaceStability.Stable
 public class FileContext {
   
-  public static final Log LOG = LogFactory.getLog(FileContext.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FileContext.class);
   /**
    * Default permission for directory and symlink
    * In previous versions, this default permission was also used to
@@ -343,7 +343,7 @@ public class FileContext {
         }
       });
     } catch (InterruptedException ex) {
-      LOG.error(ex);
+      LOG.error(ex.toString());
       throw new IOException("Failed to get the AbstractFileSystem for path: "
           + uri, ex);
     }
@@ -457,7 +457,7 @@ public class FileContext {
     } catch (UnsupportedFileSystemException ex) {
       throw ex;
     } catch (IOException ex) {
-      LOG.error(ex);
+      LOG.error(ex.toString());
       throw new RuntimeException(ex);
     }
     return getFileContext(defaultAfs, aConf);
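
Similarly, commons-logging's error(Object) allowed passing the exception itself as the message; slf4j's error takes a String, so these calls log ex.toString(), and both call sites rethrow the original exception immediately afterwards, so its stack trace is not lost. A minimal sketch (class and method names are illustrative):

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ErrorMessageSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(ErrorMessageSketch.class);

  void rewrap(Exception ex, String uri) throws IOException {
    // slf4j messages are Strings, so only ex.toString() is logged here;
    // the full stack trace travels with the rethrown, wrapped exception.
    LOG.error(ex.toString());
    throw new IOException(
        "Failed to get the AbstractFileSystem for path: " + uri, ex);
  }
}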

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 155f0be..f279e5e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -45,8 +45,6 @@ import java.util.zip.ZipFile;
 import org.apache.commons.collections.map.CaseInsensitiveMap;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -57,6 +55,8 @@ import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A collection of file-processing util methods
@@ -65,7 +65,7 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceStability.Evolving
 public class FileUtil {
 
-  private static final Log LOG = LogFactory.getLog(FileUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
 
   /* The error code is defined in winutils to indicate insufficient
    * privilege to create symbolic links. This value need to keep in
@@ -732,7 +732,7 @@ public class FileUtil {
         entry = tis.getNextTarEntry();
       }
     } finally {
-      IOUtils.cleanup(LOG, tis, inputStream);
+      IOUtils.cleanupWithLogger(LOG, tis, inputStream);
     }
   }
   
@@ -1317,7 +1317,7 @@ public class FileUtil {
       bos = new BufferedOutputStream(fos);
       jos = new JarOutputStream(bos, jarManifest);
     } finally {
-      IOUtils.cleanup(LOG, jos, bos, fos);
+      IOUtils.cleanupWithLogger(LOG, jos, bos, fos);
     }
     String[] jarCp = {classPathJar.getCanonicalPath(),
                         unexpandedWildcardClasspath.toString()};

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 97b65f2..eccfbfc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
 import java.util.LinkedList;
 
 import org.apache.commons.lang.WordUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -39,12 +37,14 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Provide command line access to a FileSystem. */
 @InterfaceAudience.Private
 public class FsShell extends Configured implements Tool {
   
-  static final Log LOG = LogFactory.getLog(FsShell.class);
+  static final Logger LOG = LoggerFactory.getLogger(FsShell.class);
 
   private static final int MAX_LINE_WIDTH = 80;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
index 0a82929..76e379c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
@@ -22,7 +22,6 @@ import java.util.LinkedList;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.ChmodParser;
@@ -32,6 +31,7 @@ import org.apache.hadoop.fs.shell.CommandFormat;
 import org.apache.hadoop.fs.shell.FsCommand;
 import org.apache.hadoop.fs.shell.PathData;
 import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
 
 /**
  * This class is the home for file permissions related commands.
@@ -41,7 +41,7 @@ import org.apache.hadoop.util.Shell;
 @InterfaceStability.Unstable
 public class FsShellPermissions extends FsCommand {
 
-  static Log LOG = FsShell.LOG;
+  static final Logger LOG = FsShell.LOG;
   
   /**
    * Register the permission related commands with the factory

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
index 7c69167..ca3db1d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
@@ -23,18 +23,19 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 class Globber {
-  public static final Log LOG = LogFactory.getLog(Globber.class.getName());
+  public static final Logger LOG =
+      LoggerFactory.getLogger(Globber.class.getName());
 
   private final FileSystem fs;
   private final FileContext fc;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index b5d83f2..aa58706 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.EOFException;
 import java.io.FileNotFoundException;
@@ -50,7 +50,8 @@ import java.util.*;
 
 public class HarFileSystem extends FileSystem {
 
-  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HarFileSystem.class);
 
   public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
   public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
@@ -1173,7 +1174,7 @@ public class HarFileSystem extends FileSystem {
         LOG.warn("Encountered exception ", ioe);
         throw ioe;
       } finally {
-        IOUtils.cleanup(LOG, lin, in);
+        IOUtils.cleanupWithLogger(LOG, lin, in);
       }
 
       FSDataInputStream aIn = fs.open(archiveIndexPath);
@@ -1198,7 +1199,7 @@ public class HarFileSystem extends FileSystem {
           }
         }
       } finally {
-        IOUtils.cleanup(LOG, aIn);
+        IOUtils.cleanupWithLogger(LOG, aIn);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 1ed01ea..c1e9d21 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -23,14 +23,15 @@ import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.*;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration; 
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** An implementation of a round-robin scheme for disk allocation for creating
  * files. The way it works is that it is kept track what disk was last
@@ -245,8 +246,8 @@ public class LocalDirAllocator {
   
   private static class AllocatorPerContext {
 
-    private final Log LOG =
-      LogFactory.getLog(AllocatorPerContext.class);
+    private static final Logger LOG =
+        LoggerFactory.getLogger(AllocatorPerContext.class);
 
     private Random dirIndexRandomizer = new Random();
     private String contextCfgItemName;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index b771812..35cd804 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -19,11 +19,12 @@ package org.apache.hadoop.fs;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** 
  * Provides a trash facility which supports pluggable Trash policies. 
@@ -34,8 +35,7 @@ import org.apache.hadoop.conf.Configured;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Trash extends Configured {
-  private static final org.apache.commons.logging.Log LOG =
-      LogFactory.getLog(Trash.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Trash.class);
 
   private TrashPolicy trashPolicy; // configured trash policy instance
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
index c65e16a..265e967 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
@@ -30,8 +30,6 @@ import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -41,6 +39,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Provides a <i>trash</i> feature.  Files are moved to a user's trash
  * directory, a subdirectory of their home directory named ".Trash".  Files are
@@ -54,8 +54,8 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class TrashPolicyDefault extends TrashPolicy {
-  private static final Log LOG =
-    LogFactory.getLog(TrashPolicyDefault.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TrashPolicyDefault.class);
 
   private static final Path CURRENT = new Path("Current");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
index 3259463..e0ef42f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
@@ -25,8 +25,6 @@ import java.net.URI;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.ftp.FTP;
 import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPFile;
@@ -45,6 +43,8 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -56,8 +56,8 @@ import org.apache.hadoop.util.Progressable;
 @InterfaceStability.Stable
 public class FTPFileSystem extends FileSystem {
 
-  public static final Log LOG = LogFactory
-      .getLog(FTPFileSystem.class);
+  public static final Logger LOG = LoggerFactory
+      .getLogger(FTPFileSystem.class);
 
   public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index 379e41f..7d36648 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -21,8 +21,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,6 +28,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A class for file/directory permissions.
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.WritableFactory;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FsPermission implements Writable {
-  private static final Log LOG = LogFactory.getLog(FsPermission.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FsPermission.class);
 
   static final WritableFactory FACTORY = new WritableFactory() {
     @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
index c7fae7b..de86bab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
@@ -23,19 +23,20 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
 
 import com.jcraft.jsch.ChannelSftp;
 import com.jcraft.jsch.JSch;
 import com.jcraft.jsch.JSchException;
 import com.jcraft.jsch.Session;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Concurrent/Multiple Connections. */
 class SFTPConnectionPool {
 
-  public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SFTPFileSystem.class);
   // Maximum number of allowed live connections. This doesn't mean we cannot
   // have more live connections. It means that when we have more
   // live connections than this threshold, any unused connection will be

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
index e181ced..61cdfc4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
@@ -26,8 +26,6 @@ import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.Vector;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -41,11 +39,14 @@ import com.jcraft.jsch.ChannelSftp;
 import com.jcraft.jsch.ChannelSftp.LsEntry;
 import com.jcraft.jsch.SftpATTRS;
 import com.jcraft.jsch.SftpException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** SFTP FileSystem. */
 public class SFTPFileSystem extends FileSystem {
 
-  public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SFTPFileSystem.class);
 
   private SFTPConnectionPool connectionPool;
   private URI uri;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index cda26e8..3b26acf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -26,8 +26,6 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +33,8 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An abstract class for the execution of a file system command
@@ -58,7 +58,7 @@ abstract public class Command extends Configured {
   private int depth = 0;
   protected ArrayList<Exception> exceptions = new ArrayList<Exception>();
 
-  private static final Log LOG = LogFactory.getLog(Command.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Command.class);
 
   /** allows stdout to be captured if necessary */
   public PrintStream out = System.out;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 1ce116e..b5cac25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -26,8 +26,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -47,6 +45,8 @@ import org.apache.zookeeper.KeeperException.Code;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * 
@@ -141,7 +141,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   @VisibleForTesting
   protected static final String BREADCRUMB_FILENAME = "ActiveBreadCrumb";
 
-  public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(ActiveStandbyElector.class);
 
   private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000;
 
@@ -712,7 +713,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   }
 
   private void fatalError(String errorMessage) {
-    LOG.fatal(errorMessage);
+    LOG.error(errorMessage);
     reset();
     appClient.notifyFatalError(errorMessage);
   }
@@ -824,10 +825,10 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
         createConnection();
         success = true;
       } catch(IOException e) {
-        LOG.warn(e);
+        LOG.warn(e.toString());
         sleepFor(5000);
       } catch(KeeperException e) {
-        LOG.warn(e);
+        LOG.warn(e.toString());
         sleepFor(5000);
       }
       ++connectionRetryCount;
@@ -866,7 +867,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
     try {
       tempZk.close();
     } catch(InterruptedException e) {
-      LOG.warn(e);
+      LOG.warn(e.toString());
     }
     zkConnectionState = ConnectionState.TERMINATED;
     wantToBeInElection = false;

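Two slf4j API differences show up in ActiveStandbyElector: slf4j defines no FATAL
level, so LOG.fatal(...) becomes LOG.error(...), and it has no warn(Throwable)
overload, so LOG.warn(e) becomes LOG.warn(e.toString()). Logging only toString()
drops the stack trace; a hedged sketch of both forms, using a hypothetical class
and message, for comparison:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class WarnExample {
      private static final Logger LOG = LoggerFactory.getLogger(WarnExample.class);

      void onConnectError(Exception e) {
        // Same behaviour as the patched code: message text only.
        LOG.warn(e.toString());
        // Alternative that keeps the stack trace: message plus throwable.
        LOG.warn("Failed to establish ZooKeeper connection", e);
      }
    }
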
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
index d952e29..3c05a25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
@@ -19,9 +19,6 @@ package org.apache.hadoop.ha;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -32,6 +29,8 @@ import org.apache.hadoop.ha.HAServiceProtocol.RequestSource;
 import org.apache.hadoop.ipc.RPC;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The FailOverController is responsible for electing an active service
@@ -43,7 +42,8 @@ import com.google.common.base.Preconditions;
 @InterfaceStability.Evolving
 public class FailoverController {
 
-  private static final Log LOG = LogFactory.getLog(FailoverController.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FailoverController.class);
 
   private final int gracefulFenceTimeout;
   private final int rpcTimeoutToNewActive;
@@ -252,7 +252,7 @@ public class FailoverController {
         } catch (FailoverFailedException ffe) {
           msg += ". Failback to " + fromSvc +
             " failed (" + ffe.getMessage() + ")";
-          LOG.fatal(msg);
+          LOG.error(msg);
         }
       }
       throw new FailoverFailedException(msg, cause);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index 5eff14c..9b7d7ba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -28,8 +28,6 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -43,6 +41,8 @@ import org.apache.hadoop.util.ToolRunner;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A command-line tool for making calls in the HAServiceProtocol.
@@ -62,7 +62,7 @@ public abstract class HAAdmin extends Configured implements Tool {
    * operation, which is why it is not documented in the usage below.
    */
   private static final String FORCEMANUAL = "forcemanual";
-  private static final Log LOG = LogFactory.getLog(HAAdmin.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HAAdmin.class);
 
   private int rpcTimeoutForChecks = -1;
   
@@ -449,7 +449,7 @@ public abstract class HAAdmin extends Configured implements Tool {
     
     if (cmdLine.hasOption(FORCEMANUAL)) {
       if (!confirmForceManual()) {
-        LOG.fatal("Aborted");
+        LOG.error("Aborted");
         return -1;
       }
       // Instruct the NNs to honor this request even if they're

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
index 24c149c..a93df75 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
@@ -23,8 +23,6 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.ha.HAServiceProtocol;
@@ -35,6 +33,8 @@ import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.util.Daemon;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is a daemon which runs in a loop, periodically heartbeating
@@ -47,7 +47,7 @@ import com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Private
 public class HealthMonitor {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       HealthMonitor.class);
 
   private Daemon daemon;
@@ -283,7 +283,7 @@ public class HealthMonitor {
       setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
         @Override
         public void uncaughtException(Thread t, Throwable e) {
-          LOG.fatal("Health monitor failed", e);
+          LOG.error("Health monitor failed", e);
           enterState(HealthMonitor.State.HEALTH_MONITOR_FAILED);
         }
       });

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
index 4898b38..63f6db6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
@@ -22,8 +22,6 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -31,6 +29,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class parses the configured list of fencing methods, and
@@ -61,7 +61,7 @@ public class NodeFencer {
   private static final Pattern HASH_COMMENT_RE =
     Pattern.compile("#.*$");
 
-  private static final Log LOG = LogFactory.getLog(NodeFencer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(NodeFencer.class);
 
   /**
    * Standard fencing methods included with Hadoop.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
index 64cd5a8..9ae113b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
@@ -23,8 +23,6 @@ import java.util.Collection;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -272,7 +270,7 @@ public class SshFenceByTcpPort extends Configured
    * Adapter from JSch's logger interface to our log4j
    */
   private static class LogAdapter implements com.jcraft.jsch.Logger {
-    static final Log LOG = LogFactory.getLog(
+    static final Logger LOG = LoggerFactory.getLogger(
         SshFenceByTcpPort.class.getName() + ".jsch");
 
     @Override
@@ -285,9 +283,8 @@ public class SshFenceByTcpPort extends Configured
       case com.jcraft.jsch.Logger.WARN:
         return LOG.isWarnEnabled();
       case com.jcraft.jsch.Logger.ERROR:
-        return LOG.isErrorEnabled();
       case com.jcraft.jsch.Logger.FATAL:
-        return LOG.isFatalEnabled();
+        return LOG.isErrorEnabled();
       default:
         return false;
       }
@@ -306,10 +303,8 @@ public class SshFenceByTcpPort extends Configured
         LOG.warn(message);
         break;
       case com.jcraft.jsch.Logger.ERROR:
-        LOG.error(message);
-        break;
       case com.jcraft.jsch.Logger.FATAL:
-        LOG.fatal(message);
+        LOG.error(message);
         break;
       default:
         break;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index e33f012..7ada04a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -27,8 +27,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -56,11 +54,13 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.LimitedPrivate("HDFS")
 public abstract class ZKFailoverController {
 
-  static final Log LOG = LogFactory.getLog(ZKFailoverController.class);
+  static final Logger LOG = LoggerFactory.getLogger(ZKFailoverController.class);
   
   public static final String ZK_QUORUM_KEY = "ha.zookeeper.quorum";
   private static final String ZK_SESSION_TIMEOUT_KEY = "ha.zookeeper.session-timeout.ms";
@@ -161,7 +161,7 @@ public abstract class ZKFailoverController {
 
   public int run(final String[] args) throws Exception {
     if (!localTarget.isAutoFailoverEnabled()) {
-      LOG.fatal("Automatic failover is not enabled for " + localTarget + "." +
+      LOG.error("Automatic failover is not enabled for " + localTarget + "." +
           " Please ensure that automatic failover is enabled in the " +
           "configuration before running the ZK failover controller.");
       return ERR_CODE_AUTO_FAILOVER_NOT_ENABLED;
@@ -183,7 +183,7 @@ public abstract class ZKFailoverController {
         }
       });
     } catch (RuntimeException rte) {
-      LOG.fatal("The failover controller encounters runtime error: " + rte);
+      LOG.error("The failover controller encounters runtime error: " + rte);
       throw (Exception)rte.getCause();
     }
   }
@@ -194,7 +194,7 @@ public abstract class ZKFailoverController {
     try {
       initZK();
     } catch (KeeperException ke) {
-      LOG.fatal("Unable to start failover controller. Unable to connect "
+      LOG.error("Unable to start failover controller. Unable to connect "
           + "to ZooKeeper quorum at " + zkQuorum + ". Please check the "
           + "configured value for " + ZK_QUORUM_KEY + " and ensure that "
           + "ZooKeeper is running.");
@@ -220,7 +220,7 @@ public abstract class ZKFailoverController {
     }
 
     if (!elector.parentZNodeExists()) {
-      LOG.fatal("Unable to start failover controller. "
+      LOG.error("Unable to start failover controller. "
           + "Parent znode does not exist.\n"
           + "Run with -formatZK flag to initialize ZooKeeper.");
       return ERR_CODE_NO_PARENT_ZNODE;
@@ -229,7 +229,7 @@ public abstract class ZKFailoverController {
     try {
       localTarget.checkFencingConfigured();
     } catch (BadFencingConfigurationException e) {
-      LOG.fatal("Fencing is not configured for " + localTarget + ".\n" +
+      LOG.error("Fencing is not configured for " + localTarget + ".\n" +
           "You must configure a fencing method before using automatic " +
           "failover.", e);
       return ERR_CODE_NO_FENCER;
@@ -375,7 +375,7 @@ public abstract class ZKFailoverController {
   }
   
   private synchronized void fatalError(String err) {
-    LOG.fatal("Fatal error occurred:" + err);
+    LOG.error("Fatal error occurred:" + err);
     fatalError = err;
     notifyAll();
   }
@@ -394,7 +394,7 @@ public abstract class ZKFailoverController {
 
     } catch (Throwable t) {
       String msg = "Couldn't make " + localTarget + " active";
-      LOG.fatal(msg, t);
+      LOG.error(msg, t);
       
       recordActiveAttempt(new ActiveAttemptRecord(false, msg + "\n" +
           StringUtils.stringifyException(t)));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
index 63bfbca..7f75582 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.ha.protocolPB;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.ha.HAServiceProtocol;
@@ -42,6 +40,8 @@ import org.apache.hadoop.ipc.RPC;
 
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is used on the server side. Calls come across the wire for the
@@ -61,7 +61,7 @@ public class HAServiceProtocolServerSideTranslatorPB implements
       TransitionToActiveResponseProto.newBuilder().build();
   private static final TransitionToStandbyResponseProto TRANSITION_TO_STANDBY_RESP = 
       TransitionToStandbyResponseProto.newBuilder().build();
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       HAServiceProtocolServerSideTranslatorPB.class);
   
   public HAServiceProtocolServerSideTranslatorPB(HAServiceProtocol server) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
index e6008a5..d517fef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -44,8 +44,6 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;
@@ -82,6 +80,8 @@ import org.mortbay.thread.QueuedThreadPool;
 import org.mortbay.util.MultiException;
 
 import com.sun.jersey.spi.container.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
@@ -100,7 +100,7 @@ import com.sun.jersey.spi.container.servlet.ServletContainer;
 @InterfaceStability.Evolving
 @Deprecated
 public class HttpServer implements FilterContainer {
-  public static final Log LOG = LogFactory.getLog(HttpServer.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
 
   static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index de68ecb..861fa48 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -46,8 +46,6 @@ import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
 import com.google.common.collect.ImmutableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -96,6 +94,9 @@ import org.mortbay.util.MultiException;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.sun.jersey.spi.container.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
 
@@ -112,7 +113,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_US
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public final class HttpServer2 implements FilterContainer {
-  public static final Log LOG = LogFactory.getLog(HttpServer2.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
 
   static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
index 9ca5b92..fc64697 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
@@ -29,11 +29,11 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.Filter;
 
@@ -47,7 +47,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_S
 public class StaticUserWebFilter extends FilterInitializer {
   static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
 
-  private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StaticUserWebFilter.class);
 
   static class User implements Principal {
     private final String name;




[3/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Posted by aa...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java
index 700999d..c978fec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.service;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is a state change listener that logs events at INFO level
@@ -30,15 +30,16 @@ import org.apache.hadoop.classification.InterfaceStability.Evolving;
 @Evolving
 public class LoggingStateChangeListener implements ServiceStateChangeListener {
 
-  private static final Log LOG = LogFactory.getLog(LoggingStateChangeListener.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LoggingStateChangeListener.class);
 
-  private final Log log;
+  private final Logger log;
 
   /**
    * Log events to the given log
    * @param log destination for events
    */
-  public LoggingStateChangeListener(Log log) {
+  public LoggingStateChangeListener(Logger log) {
     //force an NPE if a null log came in
     log.isDebugEnabled();
     this.log = log;

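Unlike most files in this patch, LoggingStateChangeListener changes a public
constructor signature: it now accepts an org.slf4j.Logger instead of a
commons-logging Log, so existing callers must pass an slf4j logger. A sketch of a
caller after the change (the surrounding class is hypothetical):

    import org.apache.hadoop.service.LoggingStateChangeListener;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ListenerCaller {
      private static final Logger LOG =
          LoggerFactory.getLogger(ListenerCaller.class);

      LoggingStateChangeListener newListener() {
        // The constructor now requires an slf4j Logger; passing a
        // commons-logging Log no longer compiles.
        return new LoggingStateChangeListener(LOG);
      }
    }
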
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
index a0a77ce..e7683a2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
@@ -22,10 +22,10 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class contains a set of methods to work with services, especially
@@ -34,7 +34,8 @@ import org.slf4j.Logger;
 @Public
 @Evolving
 public final class ServiceOperations {
-  private static final Log LOG = LogFactory.getLog(AbstractService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AbstractService.class);
 
   private ServiceOperations() {
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java
index 75601ad..658e4d3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.tracing;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
 import org.apache.htrace.core.SpanReceiver;
 import org.apache.htrace.core.TracerPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class provides functions for managing the tracer configuration at
@@ -33,8 +33,8 @@ import org.apache.htrace.core.TracerPool;
  */
 @InterfaceAudience.Private
 public class TracerConfigurationManager implements TraceAdminProtocol {
-  private static final Log LOG =
-      LogFactory.getLog(TracerConfigurationManager.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TracerConfigurationManager.class);
 
   private final String confPrefix;
   private final Configuration conf;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
index 2f46e1f..972bbff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
@@ -29,12 +29,12 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link URLClassLoader} for application isolation. Classes from the
@@ -56,8 +56,8 @@ public class ApplicationClassLoader extends URLClassLoader {
   private static final String SYSTEM_CLASSES_DEFAULT_KEY =
       "system.classes.default";
 
-  private static final Log LOG =
-    LogFactory.getLog(ApplicationClassLoader.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ApplicationClassLoader.class.getName());
 
   static {
     try (InputStream is = ApplicationClassLoader.class.getClassLoader()
@@ -179,7 +179,7 @@ public class ApplicationClassLoader extends URLClassLoader {
         }
       } catch (ClassNotFoundException e) {
         if (LOG.isDebugEnabled()) {
-          LOG.debug(e);
+          LOG.debug(e.toString());
         }
         ex = e;
       }

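As with the warn() calls earlier, LOG.debug(e) has no single-Throwable slf4j
overload, so the patch logs e.toString() inside the existing isDebugEnabled()
guard. With slf4j the guard is only needed when computing the arguments is itself
expensive; a hedged sketch of the same kind of branch written with a
message-plus-throwable call (class and method names are illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoadFallbackExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(LoadFallbackExample.class);

      Class<?> tryLoad(String name) {
        try {
          return Class.forName(name);
        } catch (ClassNotFoundException e) {
          // slf4j treats a trailing Throwable argument specially, so this
          // logs the message with the stack trace when DEBUG is enabled
          // and is cheap to call when it is not.
          LOG.debug("Class {} not found in this loader", name, e);
          return null;
        }
      }
    }
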
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
index df15166..8e48cb9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
@@ -26,10 +26,10 @@ import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /*
  * This class is a container of multiple thread pools, each for a volume,
@@ -43,7 +43,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Unstable
 public class AsyncDiskService {
   
-  public static final Log LOG = LogFactory.getLog(AsyncDiskService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(AsyncDiskService.class);
   
   // ThreadPool core pool size
   private static final int CORE_THREADS_PER_VOLUME = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
index d12c4c1..6d42dc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CombinedIPWhiteList implements IPList {
 
-  public static final Log LOG = LogFactory.getLog(CombinedIPWhiteList.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CombinedIPWhiteList.class);
   private static final String LOCALHOST_IP = "127.0.0.1";
 
   private final IPList[] networkLists;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
index 6ee1212..146f65c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.util;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
@@ -29,9 +32,6 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 /**
  * FileBasedIPList loads a list of subnets in CIDR format and ip addresses from
  * a file.
@@ -43,7 +43,8 @@ import org.apache.commons.logging.LogFactory;
  */
 public class FileBasedIPList implements IPList {
 
-  private static final Log LOG = LogFactory.getLog(FileBasedIPList.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FileBasedIPList.class);
 
   private final String fileName;
   private final MachineList addressList;
@@ -107,7 +108,7 @@ public class FileBasedIPList implements IPList {
         }
       }
     } catch (IOException ioe) {
-      LOG.error(ioe);
+      LOG.error(ioe.toString());
       throw ioe;
     }
     return null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java
index e4a8d0f..fbc1418 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.util;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link GSet} is set,
@@ -35,7 +35,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public interface GSet<K, E extends K> extends Iterable<E> {
-  static final Log LOG = LogFactory.getLog(GSet.class);
+  Logger LOG = LoggerFactory.getLogger(GSet.class);
 
   /**
    * @return The size of this set.

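GSet is an interface, so the old "static final" modifiers on the Log constant were
already redundant; interface fields are implicitly public, static and final, and
the new declaration simply drops them. A two-line sketch on a hypothetical
interface:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    interface GSetLikeExample {
      // Implicitly public static final.
      Logger LOG = LoggerFactory.getLogger(GSetLikeExample.class);
    }
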
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
index 528865b..7b0a25c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
@@ -34,8 +34,6 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +43,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <code>GenericOptionsParser</code> is a utility to parse command line
@@ -113,7 +113,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 @InterfaceStability.Evolving
 public class GenericOptionsParser {
 
-  private static final Log LOG = LogFactory.getLog(GenericOptionsParser.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(GenericOptionsParser.class);
   private Configuration conf;
   private CommandLine commandLine;
   private final boolean parseSuccessful;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
index b7adc38..dab7d62 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
@@ -31,11 +31,11 @@ import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
@@ -47,7 +47,8 @@ import org.xml.sax.SAXException;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class HostsFileReader {
-  private static final Log LOG = LogFactory.getLog(HostsFileReader.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HostsFileReader.class);
 
   private final AtomicReference<HostDetails> current;
 
@@ -171,7 +172,7 @@ public class HostsFileReader {
         }
       }
     } catch (IOException|SAXException|ParserConfigurationException e) {
-      LOG.fatal("error parsing " + filename, e);
+      LOG.error("error parsing " + filename, e);
       throw new RuntimeException(e);
     } finally {
       fileInputStream.close();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
index 0512d4a..1ffb7db 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
@@ -21,11 +21,11 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.NoSuchElementException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implements an intrusive doubly-linked list.
@@ -298,7 +298,8 @@ public class IntrusiveCollection<E extends IntrusiveCollection.Element>
     return true;
   }
 
-  public static final Log LOG = LogFactory.getLog(IntrusiveCollection.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(IntrusiveCollection.class);
 
   @Override
   public boolean remove(Object o) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
index 80d4468..420ac8b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
@@ -24,8 +24,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.AbstractService;
@@ -35,6 +33,8 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class which sets up a simple thread which runs in a loop sleeping
@@ -45,7 +45,7 @@ import com.google.common.collect.Sets;
  */
 @InterfaceAudience.Private
 public class JvmPauseMonitor extends AbstractService {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       JvmPauseMonitor.class);
 
   /** The target sleep time */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
index 2e6c079..b01330f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
@@ -26,12 +26,12 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.util.SubnetUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.net.InetAddresses;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Container class which holds a list of ip/host addresses and 
@@ -43,7 +43,7 @@ import com.google.common.net.InetAddresses;
 
 public class MachineList {
   
-  public static final Log LOG = LogFactory.getLog(MachineList.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MachineList.class);
   public static final String WILDCARD_VALUE = "*";
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
index ff5803c..2578ae7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A helper to load the native hadoop code i.e. libhadoop.so.
@@ -35,8 +35,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 @InterfaceStability.Unstable
 public class NativeCodeLoader {
 
-  private static final Log LOG =
-    LogFactory.getLog(NativeCodeLoader.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeCodeLoader.class);
   
   private static boolean nativeCodeLoaded = false;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java
index fc392c4..cf1e460 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java
@@ -25,8 +25,6 @@ import java.util.Arrays;
 import java.util.Timer;
 import java.util.TimerTask;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.service.AbstractService;
@@ -34,6 +32,8 @@ import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * 
@@ -43,7 +43,8 @@ import org.apache.hadoop.util.StringUtils;
  */
 public class NodeHealthScriptRunner extends AbstractService {
 
-  private static Log LOG = LogFactory.getLog(NodeHealthScriptRunner.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NodeHealthScriptRunner.class);
 
   /** Absolute path to the health script. */
   private String nodeHealthScript;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
index 3fbc935..bd1c0f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.util;
 
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Utility to assist with generation of progress reports.  Applications build
  * a hierarchy of {@link Progress} instances, each modelling a phase of
@@ -33,7 +33,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.LimitedPrivate({"MapReduce"})
 @InterfaceStability.Unstable
 public class Progress {
-  private static final Log LOG = LogFactory.getLog(Progress.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Progress.class);
   private String status = "";
   private float progress;
   private int currentPhase;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
index e3f0077..41907fb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.util;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -47,7 +47,8 @@ public class ShutdownHookManager {
 
   private static final ShutdownHookManager MGR = new ShutdownHookManager();
 
-  private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ShutdownHookManager.class);
   private static final long TIMEOUT_DEFAULT = 10;
   private static final TimeUnit TIME_UNIT_DEFAULT = TimeUnit.SECONDS;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
index ffd88fb..5405d77 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.util;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -29,7 +29,8 @@ import java.util.concurrent.TimeUnit;
  * Helper class to shutdown {@link Thread}s and {@link ExecutorService}s.
  */
 public class ShutdownThreadsHelper {
-  private static Log LOG = LogFactory.getLog(ShutdownThreadsHelper.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ShutdownThreadsHelper.class);
 
   @VisibleForTesting
   static final int SHUTDOWN_WAIT_MS = 3000;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
index bba1631..dde4977 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
@@ -32,11 +32,11 @@ import java.util.regex.Pattern;
 
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Plugin to calculate resource information on Linux systems.
@@ -44,8 +44,7 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class SysInfoLinux extends SysInfo {
-  private static final Log LOG =
-      LogFactory.getLog(SysInfoLinux.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SysInfoLinux.class);
 
   /**
    * proc's meminfo virtual file has keys-values in the format

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
index e21adac..2007ab3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
@@ -21,11 +21,11 @@ import java.io.IOException;
 
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Plugin to calculate resource information on Windows systems.
@@ -34,7 +34,8 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 @InterfaceStability.Evolving
 public class SysInfoWindows extends SysInfo {
 
-  private static final Log LOG = LogFactory.getLog(SysInfoWindows.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SysInfoWindows.class);
 
   private long vmemSize;
   private long memSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
index 86f523a..d23a3dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
@@ -17,15 +17,14 @@
  */
 package org.apache.hadoop.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceStability.Evolving
 public class ThreadUtil {
   
-  private static final Log LOG = LogFactory.getLog(ThreadUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ThreadUtil.class);
 
   /**
    * Cause the current thread to sleep as close as possible to the provided

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
index ca994d6..a43ebfa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
@@ -22,11 +22,11 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class returns build information about Hadoop components.
@@ -34,7 +34,7 @@ import org.apache.hadoop.io.IOUtils;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class VersionInfo {
-  private static final Log LOG = LogFactory.getLog(VersionInfo.class);
+  private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class);
 
   private Properties info;
 
@@ -50,7 +50,7 @@ public class VersionInfo {
       }
       info.load(is);
     } catch (IOException ex) {
-      LogFactory.getLog(getClass()).warn("Could not read '" +
+      LoggerFactory.getLogger(getClass()).warn("Could not read '" +
           versionInfoFile + "', " + ex.toString(), ex);
     } finally {
       IOUtils.closeStream(is);
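
The hunk above keeps the existing string concatenation in the warn() call. slf4j also accepts parameterized messages, where a trailing Throwable that has no matching {} placeholder is logged with its stack trace (slf4j 1.6+ behaviour). An equivalent parameterized form, shown only for illustration and not part of the commit, would be:

  LoggerFactory.getLogger(getClass()).warn("Could not read '{}', {}",
      versionInfoFile, ex.toString(), ex);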

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java
index d687867..61eb777 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.util.concurrent;
 
 import com.google.common.util.concurrent.AbstractFuture;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
@@ -29,7 +29,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 /** A {@link Future} implemented using an {@link AsyncGet} object. */
 public class AsyncGetFuture<T, E extends Throwable> extends AbstractFuture<T> {
-  public static final Log LOG = LogFactory.getLog(AsyncGetFuture.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(AsyncGetFuture.class);
 
   private final AtomicBoolean called = new AtomicBoolean(false);
   private final AsyncGet<T, E> asyncGet;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java
index 3bc9ed9..5f22f93 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java
@@ -20,8 +20,8 @@
 
 package org.apache.hadoop.util.concurrent;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
@@ -29,8 +29,8 @@ import java.util.concurrent.Future;
 /** Helper functions for Executors. */
 public final class ExecutorHelper {
 
-  private static final Log LOG = LogFactory
-      .getLog(ExecutorHelper.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ExecutorHelper.class);
 
   static void logThrowableFromAfterExecute(Runnable r, Throwable t) {
     if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java
index 8d910b6..78e729b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java
@@ -20,8 +20,8 @@
 
 package org.apache.hadoop.util.concurrent;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.RejectedExecutionHandler;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -32,8 +32,8 @@ import java.util.concurrent.ThreadFactory;
 public class HadoopScheduledThreadPoolExecutor extends
     ScheduledThreadPoolExecutor {
 
-  private static final Log LOG = LogFactory
-      .getLog(HadoopScheduledThreadPoolExecutor.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HadoopScheduledThreadPoolExecutor.class);
 
   public HadoopScheduledThreadPoolExecutor(int corePoolSize) {
     super(corePoolSize);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java
index bcf26cb..fa845b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java
@@ -20,8 +20,8 @@
 
 package org.apache.hadoop.util.concurrent;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.RejectedExecutionHandler;
@@ -34,8 +34,8 @@ import java.util.concurrent.TimeUnit;
  *  */
 public final class HadoopThreadPoolExecutor extends ThreadPoolExecutor {
 
-  private static final Log LOG = LogFactory
-      .getLog(HadoopThreadPoolExecutor.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HadoopThreadPoolExecutor.class);
 
   public HadoopThreadPoolExecutor(int corePoolSize,
       int maximumPoolSize,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
index afcf2ac..b1964e6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.cli;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.cli.util.*;
 import org.apache.hadoop.cli.util.CommandExecutor.Result;
 import org.apache.hadoop.conf.Configuration;
@@ -28,6 +26,9 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.DefaultHandler;
@@ -41,8 +42,8 @@ import java.util.ArrayList;
  * Tests for the Command Line Interface (CLI)
  */
 public class CLITestHelper {
-  private static final Log LOG =
-    LogFactory.getLog(CLITestHelper.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CLITestHelper.class.getName());
   
   // In this mode, it runs the command and compares the actual output
   // with the expected output  

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
index f9c8c16..9183524 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
@@ -26,8 +26,6 @@ import java.nio.ByteOrder;
 import java.util.EnumSet;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.ByteBufferReadable;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FSExceptionMessages;
@@ -43,9 +41,11 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class CryptoStreamsTestBase {
-  protected static final Log LOG = LogFactory.getLog(
+  protected static final Logger LOG = LoggerFactory.getLogger(
       CryptoStreamsTestBase.class);
 
   protected static CryptoCodec codec;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
index 52e547b..a856688 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
@@ -31,8 +31,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -46,9 +44,12 @@ import org.junit.Before;
 import org.junit.Test;
 
 import com.google.common.primitives.Longs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestCryptoCodec {
-  private static final Log LOG= LogFactory.getLog(TestCryptoCodec.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestCryptoCodec.class);
   private static byte[] key = new byte[16];
   private static byte[] iv = new byte[16];
   private static final int bufferSize = 4096;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
index 2e208d2..4c943bb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
@@ -32,8 +32,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
@@ -41,6 +39,8 @@ import org.junit.Test;
 
 import com.google.common.base.Supplier;
 import com.google.common.util.concurrent.Uninterruptibles;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -48,7 +48,8 @@ import com.google.common.util.concurrent.Uninterruptibles;
  * </p>
  */
 public abstract class FCStatisticsBaseTest {
-  private static final Log LOG = LogFactory.getLog(FCStatisticsBaseTest.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FCStatisticsBaseTest.class);
 
   static protected int blockSize = 512;
   static protected int numBlocks = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
index 584ca40..080c515 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.fail;
 
 public class TestFileContext {
-  private static final Log LOG = LogFactory.getLog(TestFileContext.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestFileContext.class);
 
   @Test
   public void testDefaultURIWithoutScheme() throws Exception {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
index dd5279d..28710e2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
@@ -31,16 +31,16 @@ import java.util.Collections;
 import java.util.List;
 
 import org.junit.Test;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFileStatus {
 
-  private static final Log LOG =
-    LogFactory.getLog(TestFileStatus.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestFileStatus.class);
   
   /** Values for creating {@link FileStatus} in some tests */
   static final int LENGTH = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
index a1e81ec..e156ec6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
@@ -48,8 +48,6 @@ import java.util.jar.Manifest;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
@@ -61,9 +59,11 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFileUtil {
-  private static final Log LOG = LogFactory.getLog(TestFileUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestFileUtil.class);
 
   private static final File TEST_DIR = GenericTestUtils.getTestDir("fu");
   private static final String FILE = "x";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
index 9e199ca..3dd325f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
@@ -29,17 +29,17 @@ import static org.junit.Assume.assumeTrue;
 import java.io.File;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFsShellCopy {
-  static final Log LOG = LogFactory.getLog(TestFsShellCopy.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestFsShellCopy.class);
 
   static Configuration conf;
   static FsShell shell; 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
index 46c4491..024a076 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
@@ -32,8 +32,6 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.shell.FsCommand;
 import org.apache.hadoop.fs.shell.PathData;
@@ -44,14 +42,16 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This test validates that chmod, chown, chgrp returning correct exit codes
  * 
  */
 public class TestFsShellReturnCode {
-  private static final Log LOG = LogFactory
-      .getLog("org.apache.hadoop.fs.TestFsShellReturnCode");
+  private static final Logger LOG = LoggerFactory
+      .getLogger("org.apache.hadoop.fs.TestFsShellReturnCode");
 
   private static final Configuration conf = new Configuration();
   private static FileSystem fileSys;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
index 89c886e..5fe4e39 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
@@ -21,17 +21,17 @@ import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertThat;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFsShellTouch {
-  static final Log LOG = LogFactory.getLog(TestFsShellTouch.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestFsShellTouch.class);
 
   static FsShell shell;
   static LocalFileSystem lfs;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
index bacdbb7..a1aa4de 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.fs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
@@ -30,6 +28,8 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.lang.reflect.Method;
@@ -48,7 +48,8 @@ import static org.junit.Assert.assertTrue;
 
 @SuppressWarnings("deprecation")
 public class TestHarFileSystem {
-  public static final Log LOG = LogFactory.getLog(TestHarFileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestHarFileSystem.class);
 
   /**
    * FileSystem methods that must not be overwritten by

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java
index e7766f3..3d202df 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.fs.contract;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.URI;
@@ -39,8 +39,8 @@ import java.net.URISyntaxException;
  */
 public abstract class AbstractBondedFSContract extends AbstractFSContract {
 
-  private static final Log LOG =
-      LogFactory.getLog(AbstractBondedFSContract.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AbstractBondedFSContract.class);
 
   /**
    * Pattern for the option for test filesystems from schema

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
index ca01702..6da5182 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
@@ -32,8 +32,6 @@ import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.CreateFlag;
@@ -49,6 +47,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** The load generator is a tool for testing NameNode behavior under
  * different client loads. Note there is a subclass of this clas that lets 
@@ -129,7 +129,7 @@ import com.google.common.base.Preconditions;
  *   -scriptFile <file name>: text file to parse for scripted operation
  */
 public class LoadGenerator extends Configured implements Tool {
-  public static final Log LOG = LogFactory.getLog(LoadGenerator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class);
   
   private volatile static boolean shouldRun = true;
   protected static Path root = DataGenerator.DEFAULT_ROOT;
@@ -341,7 +341,7 @@ public class LoadGenerator extends Configured implements Tool {
         executionTime[WRITE_CLOSE] += (Time.now() - startTime);
         numOfOps[WRITE_CLOSE]++;
       } finally {
-        IOUtils.cleanup(LOG, out);
+        IOUtils.cleanupWithLogger(LOG, out);
       }
     }
   }
@@ -651,7 +651,7 @@ public class LoadGenerator extends Configured implements Tool {
       System.err.println("Line: " + lineNum + ", " + e.getMessage());
       return -1;
     } finally {
-      IOUtils.cleanup(LOG, br);
+      IOUtils.cleanupWithLogger(LOG, br);
     }
     
     // Copy vectors to arrays of values, to avoid autoboxing overhead later
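
IOUtils.cleanup(Log, Closeable...) takes a commons-logging Log, so once the LOG field becomes an org.slf4j.Logger the call sites switch to the slf4j-accepting cleanupWithLogger() variant shown above. A self-contained sketch of its use (the CleanupExample class and the /etc/hosts path are only illustrative):

  import java.io.FileInputStream;
  import java.io.InputStream;

  import org.apache.hadoop.io.IOUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class CleanupExample {
    private static final Logger LOG =
        LoggerFactory.getLogger(CleanupExample.class);

    public static void main(String[] args) throws Exception {
      InputStream in = new FileInputStream("/etc/hosts");
      try {
        // ... read from the stream ...
      } finally {
        // Closes the stream; any exception raised on close is logged
        // through the supplied slf4j Logger instead of being rethrown.
        IOUtils.cleanupWithLogger(LOG, in);
      }
    }
  }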

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
index 2375081..764ad2e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
@@ -19,18 +19,18 @@ package org.apache.hadoop.ha;
 
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.server.ZooKeeperServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class ActiveStandbyElectorTestUtil {
   
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       ActiveStandbyElectorTestUtil.class);
   private static final long LOG_INTERVAL_MS = 500;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java
index 551da56..a15c5d1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java
@@ -23,8 +23,6 @@ import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
 import com.google.protobuf.BlockingService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB;
@@ -38,6 +36,8 @@ import org.apache.hadoop.security.AccessControlException;
 import org.mockito.Mockito;
 
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HA_HM_RPC_TIMEOUT_DEFAULT;
 
@@ -46,7 +46,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HA_HM_RPC_TIMEOUT_DEF
  * a mock implementation.
  */
 class DummyHAService extends HAServiceTarget {
-  public static final Log LOG = LogFactory.getLog(DummyHAService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(DummyHAService.class);
   private static final String DUMMY_FENCE_KEY = "dummy.fence.key";
   volatile HAServiceState state;
   HAServiceProtocol proxy, healthMonitorProxy;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
index 04f6576..0967c1a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@@ -39,6 +37,8 @@ import org.apache.zookeeper.server.ZooKeeperServer;
 
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Ints;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Harness for starting two dummy ZK FailoverControllers, associated with
@@ -55,7 +55,8 @@ public class MiniZKFCCluster {
   
   private DummySharedResource sharedResource = new DummySharedResource();
   
-  private static final Log LOG = LogFactory.getLog(MiniZKFCCluster.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MiniZKFCCluster.class);
   
   public MiniZKFCCluster(Configuration conf, ZooKeeperServer zks) {
     this.conf = conf;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
index 1d8f48e..0e59aa1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
@@ -24,8 +24,6 @@ import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 
@@ -34,9 +32,11 @@ import org.junit.Test;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestHAAdmin {
-  private static final Log LOG = LogFactory.getLog(TestHAAdmin.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestHAAdmin.class);
   
   private HAAdmin tool;
   private ByteArrayOutputStream errOutBytes = new ByteArrayOutputStream();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
index 6c46543..8738372 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
@@ -23,8 +23,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@@ -34,9 +32,11 @@ import org.apache.hadoop.util.Time;
 
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestHealthMonitor {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       TestHealthMonitor.class);
   
   /** How many times has createProxy been called */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
index 0e4a1ca..7036175 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
@@ -33,14 +33,14 @@ import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestGlobalFilter extends HttpServerFunctionalTest {
-  static final Log LOG = LogFactory.getLog(HttpServer2.class);
+  static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
   static final Set<String> RECORDS = new TreeSet<String>(); 
 
   /** A very simple filter that records accessed uri's */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
index b8c5e11..27d6b07 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.http;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -40,6 +38,8 @@ import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
 import org.mortbay.jetty.Connector;
 import org.mortbay.util.ajax.JSON;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -72,7 +72,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_S
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
 
 public class TestHttpServer extends HttpServerFunctionalTest {
-  static final Log LOG = LogFactory.getLog(TestHttpServer.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class);
   private static HttpServer2 server;
   private static final int MAX_THREADS = 10;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java
index d72a958..afd06ac 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.http;
 
 import org.apache.http.HttpStatus;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.http.resource.JerseyResource;
@@ -27,12 +25,14 @@ import org.apache.hadoop.net.NetUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.HttpURLConnection;
 import java.net.URL;
 
 public class TestHttpServerLogs extends HttpServerFunctionalTest {
-  static final Log LOG = LogFactory.getLog(TestHttpServerLogs.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestHttpServerLogs.class);
   private static HttpServer2 server;
 
   @BeforeClass

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java
index c92944e..07dbc2a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.http;
 
 
 import org.junit.Test;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.FileNotFoundException;
 
@@ -28,7 +28,8 @@ import java.io.FileNotFoundException;
  * Test webapp loading
  */
 public class TestHttpServerWebapps extends HttpServerFunctionalTest {
-  private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestHttpServerWebapps.class);
 
   /**
    * Test that the test server is loadable on the classpath
@@ -58,7 +59,7 @@ public class TestHttpServerWebapps extends HttpServerFunctionalTest {
       stop(server);
       fail("Expected an exception, got " + serverDescription);
     } catch (FileNotFoundException expected) {
-      log.debug("Expected exception " + expected, expected);
+      LOG.debug("Expected exception " + expected, expected);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java
index cbdda90..908ccd0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.http;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.minikdc.MiniKdc;
@@ -38,6 +36,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -53,7 +53,8 @@ import static org.junit.Assert.assertTrue;
  */
 public class TestHttpServerWithSpengo {
 
-  static final Log LOG = LogFactory.getLog(TestHttpServerWithSpengo.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(TestHttpServerWithSpengo.class);
 
   private static final String SECRET_STR = "secret";
   private static final String HTTP_USER = "HTTP";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
index 09f31df..4c35b39 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
@@ -33,14 +33,14 @@ import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestPathFilter extends HttpServerFunctionalTest {
-  static final Log LOG = LogFactory.getLog(HttpServer2.class);
+  static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
   static final Set<String> RECORDS = new TreeSet<String>(); 
 
   /** A very simple filter that records accessed uri's */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
index f52a055..3c68986 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
@@ -32,8 +32,6 @@ import javax.net.ssl.SSLHandshakeException;
 import javax.net.ssl.SSLSocket;
 import javax.net.ssl.SSLSocketFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
@@ -45,6 +43,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This testcase issues SSL certificates configures the HttpServer to serve
@@ -56,7 +56,8 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
   private static final String BASEDIR =
       GenericTestUtils.getTempPath(TestSSLHttpServer.class.getSimpleName());
 
-  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestSSLHttpServer.class);
   private static Configuration conf;
   private static HttpServer2 server;
   private static String keystoresDir;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
index 6b17ccc..b5136ca 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
@@ -32,15 +32,15 @@ import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestServletFilter extends HttpServerFunctionalTest {
-  static final Log LOG = LogFactory.getLog(HttpServer2.class);
+  static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
   static volatile String uri = null; 
 
   /** A very simple filter which record the uri filtered. */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
index f1313e2..2203a04 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
@@ -30,9 +30,9 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.mortbay.util.ajax.JSON;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple Jersey resource class TestHttpServer.
@@ -41,7 +41,7 @@ import org.mortbay.util.ajax.JSON;
  */
 @Path("")
 public class JerseyResource {
-  static final Log LOG = LogFactory.getLog(JerseyResource.class);
+  static final Logger LOG = LoggerFactory.getLogger(JerseyResource.class);
 
   public static final String PATH = "path";
   public static final String OP = "op";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
index 505aca7..722e9de 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
@@ -21,13 +21,15 @@ package org.apache.hadoop.io;
 import java.io.*;
 
 
-import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.*;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertEquals;
@@ -37,7 +39,8 @@ import static org.junit.Assert.fail;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestArrayFile {
-  private static final Log LOG = LogFactory.getLog(TestArrayFile.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestArrayFile.class);
   
   private static final Path TEST_DIR = new Path(GenericTestUtils.getTempPath(
       TestMapFile.class.getSimpleName()));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
index bd8f2ef..b70e011 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
@@ -21,16 +21,18 @@ package org.apache.hadoop.io;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.assertEquals;
 
 public class TestDefaultStringifier {
 
   private static Configuration conf = new Configuration();
-  private static final Log LOG = LogFactory.getLog(TestDefaultStringifier.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestDefaultStringifier.class);
 
   private char[] alphabet = "abcdefghijklmnopqrstuvwxyz".toCharArray();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
index e97ab6a..0448243 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.io;
 import java.io.*;
 import java.util.*;
 
-import org.apache.commons.logging.*;
-
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.SequenceFile.Metadata;
@@ -40,11 +38,14 @@ import static org.junit.Assert.fail;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /** Support for flat files of binary key/value pairs. */
 public class TestSequenceFile {
-  private static final Log LOG = LogFactory.getLog(TestSequenceFile.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestSequenceFile.class);
 
   private Configuration conf = new Configuration();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
index 1fcfab6..b6ec487 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
@@ -21,14 +21,13 @@ package org.apache.hadoop.io;
 import java.io.*;
 import java.util.*;
 
-
-import org.apache.commons.logging.*;
-
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
@@ -37,7 +36,7 @@ import static org.junit.Assert.fail;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestSetFile {
-  private static final Log LOG = LogFactory.getLog(TestSetFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestSetFile.class);
   private static String FILE = GenericTestUtils.getTempPath("test.set");
 
   private static Configuration conf = new Configuration();




[5/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Posted by aa...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
index d4514c6..519fcd7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
@@ -22,8 +22,6 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +34,8 @@ import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
 import org.apache.hadoop.util.bloom.Key;
 import org.apache.hadoop.util.hash.Hash;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_KEY;
@@ -52,7 +52,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOO
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class BloomMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomMapFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BloomMapFile.class);
   public static final String BLOOM_FILE_NAME = "bloom";
   public static final int HASH_COUNT = 5;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
index a3fea31..d5ab4d2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
@@ -22,11 +22,10 @@ import java.nio.ByteOrder;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import sun.misc.Unsafe;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import com.google.common.primitives.Longs;
 import com.google.common.primitives.UnsignedBytes;
 
@@ -36,7 +35,7 @@ import com.google.common.primitives.UnsignedBytes;
  * class to be able to compare arrays that start at non-zero offsets.
  */
 abstract class FastByteComparisons {
-  static final Log LOG = LogFactory.getLog(FastByteComparisons.class);
+  static final Logger LOG = LoggerFactory.getLogger(FastByteComparisons.class);
 
   /**
    * Lexicographically compare two byte arrays.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
index b639edc..6142cd6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ -32,13 +32,13 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
@@ -49,7 +49,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class IOUtils {
-  public static final Log LOG = LogFactory.getLog(IOUtils.class);
+  public static final Logger LOG = LoggerFactory.getLogger(IOUtils.class);
 
   /**
    * Copies from one stream to another.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 5ba506a..fde1c86 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -23,8 +23,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -37,6 +35,8 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_KEY;
@@ -60,7 +60,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SK
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class MapFile {
-  private static final Log LOG = LogFactory.getLog(MapFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapFile.class);
 
   /** The name of the index file. */
   public static final String INDEX_FILE_NAME = "index";
@@ -1002,7 +1002,7 @@ public class MapFile {
       while (reader.next(key, value))               // copy all entries
         writer.append(key, value);
     } finally {
-      IOUtils.cleanup(LOG, writer, reader);
+      IOUtils.cleanupWithLogger(LOG, writer, reader);
     }
   }
 }

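Wherever a class's LOG field changes type from Log to Logger, call sites like the one above also have to move from IOUtils.cleanup(Log, ...) to IOUtils.cleanupWithLogger(Logger, ...), since the older helper is typed against the commons-logging Log and no longer accepts the slf4j field. A minimal sketch of that call-site change (the stream variable is hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CleanupExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(CleanupExample.class);

      public static void main(String[] args) {
        InputStream in = new ByteArrayInputStream(new byte[0]);
        // cleanupWithLogger closes the streams, logging (rather than
        // rethrowing) any IOException from close() via the slf4j Logger;
        // passing this Logger to the old cleanup(Log, ...) would not compile.
        IOUtils.cleanupWithLogger(LOG, in);
      }
    }
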
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
index a8c0690..2e65f12 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
@@ -23,8 +23,6 @@ import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.nativeio.NativeIO;
@@ -33,6 +31,8 @@ import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_WILLNEED;
 
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Manages a pool of threads which can issue readahead requests on file descriptors.
@@ -40,7 +40,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class ReadaheadPool {
-  static final Log LOG = LogFactory.getLog(ReadaheadPool.class);
+  static final Logger LOG = LoggerFactory.getLogger(ReadaheadPool.class);
   private static final int POOL_SIZE = 4;
   private static final int MAX_POOL_SIZE = 16;
   private static final int CAPACITY = 1024;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 16ee874..253acea 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -24,7 +24,6 @@ import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.Options.CreateOpts;
@@ -50,6 +49,8 @@ import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.MergeSort;
 import org.apache.hadoop.util.PriorityQueue;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
@@ -202,7 +203,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_SKIP_CHECKSU
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class SequenceFile {
-  private static final Log LOG = LogFactory.getLog(SequenceFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SequenceFile.class);
 
   private SequenceFile() {}                         // no public ctor
 
@@ -1893,7 +1894,7 @@ public class SequenceFile {
         succeeded = true;
       } finally {
         if (!succeeded) {
-          IOUtils.cleanup(LOG, this.in);
+          IOUtils.cleanupWithLogger(LOG, this.in);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
index 89f1e42..f5d33a1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
@@ -25,9 +25,10 @@ import java.io.UTFDataFormatException;
 
 import org.apache.hadoop.util.StringUtils;
 
-import org.apache.commons.logging.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** A WritableComparable for strings that uses the UTF8 encoding.
  * 
@@ -42,7 +43,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Stable
 public class UTF8 implements WritableComparable<UTF8> {
-  private static final Log LOG= LogFactory.getLog(UTF8.class);
+  private static final Logger LOG= LoggerFactory.getLogger(UTF8.class);
   private static final DataInputBuffer IBUF = new DataInputBuffer();
 
   private static final ThreadLocal<DataOutputBuffer> OBUF_FACTORY =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 01bffa7..f103aad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -23,8 +23,6 @@ import java.util.Set;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -33,6 +31,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A global compressor/decompressor pool used to save and reuse 
@@ -41,7 +41,7 @@ import com.google.common.cache.LoadingCache;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class CodecPool {
-  private static final Log LOG = LogFactory.getLog(CodecPool.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class);
   
   /**
    * A global compressor pool used to save the expensive 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index 8fff75d..3701f20 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.io.compress;
 
 import java.util.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -28,6 +26,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A factory that will find the correct codec for a given filename.
@@ -36,8 +36,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceStability.Evolving
 public class CompressionCodecFactory {
 
-  public static final Log LOG =
-    LogFactory.getLog(CompressionCodecFactory.class.getName());
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CompressionCodecFactory.class.getName());
   
   private static final ServiceLoader<CompressionCodec> CODEC_PROVIDERS =
     ServiceLoader.load(CompressionCodec.class);

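Several of the converted classes (this one, Lz4Compressor, SnappyCompressor, SerializationFactory) keep the older getName() spelling. With slf4j the String and Class overloads of getLogger produce loggers registered under the same fully-qualified class name, so either form works; a small sketch with a placeholder class name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggerNameExample {
      // Both loggers share the class's fully-qualified name, so the two
      // forms below are interchangeable.
      private static final Logger BY_NAME =
          LoggerFactory.getLogger(LoggerNameExample.class.getName());
      private static final Logger BY_CLASS =
          LoggerFactory.getLogger(LoggerNameExample.class);

      public static void main(String[] args) {
        System.out.println(BY_NAME.getName().equals(BY_CLASS.getName())); // true
      }
    }
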
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
index 31196cc..33f39ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
@@ -22,14 +22,14 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.zlib.ZlibDecompressor;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
@@ -37,7 +37,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class DefaultCodec implements Configurable, CompressionCodec, DirectDecompressionCodec {
-  private static final Log LOG = LogFactory.getLog(DefaultCodec.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DefaultCodec.class);
   
   Configuration conf;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
index a973dc9..d4a9787 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
@@ -24,9 +24,8 @@ import java.nio.ByteBuffer;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Compressor} based on the popular 
@@ -42,7 +41,8 @@ public class Bzip2Compressor implements Compressor {
   static final int DEFAULT_BLOCK_SIZE = 9;
   static final int DEFAULT_WORK_FACTOR = 30;
 
-  private static final Log LOG = LogFactory.getLog(Bzip2Compressor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Bzip2Compressor.class);
 
   private long stream;
   private int blockSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
index 3135165..96693ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
@@ -23,9 +23,8 @@ import java.nio.Buffer;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.io.compress.Decompressor;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Decompressor} based on the popular 
@@ -36,7 +35,8 @@ import org.apache.commons.logging.LogFactory;
 public class Bzip2Decompressor implements Decompressor {
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024;
   
-  private static final Log LOG = LogFactory.getLog(Bzip2Decompressor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Bzip2Decompressor.class);
 
   private long stream;
   private boolean conserveMemory;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
index 0bbcc36..7ddae77 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.io.compress.bzip2;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.NativeCodeLoader;
@@ -30,6 +28,8 @@ import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor;
 import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor;
 import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor;
 import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A collection of factories to create the right 
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
  * 
  */
 public class Bzip2Factory {
-  private static final Log LOG = LogFactory.getLog(Bzip2Factory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Bzip2Factory.class);
 
   private static String bzip2LibraryName = "";
   private static boolean nativeBzip2Loaded;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java
index ccfae8b..3792c36 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java
@@ -22,19 +22,19 @@ import java.io.IOException;
 import java.nio.Buffer;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Compressor} based on the lz4 compression algorithm.
  * http://code.google.com/p/lz4/
  */
 public class Lz4Compressor implements Compressor {
-  private static final Log LOG =
-      LogFactory.getLog(Lz4Compressor.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Lz4Compressor.class.getName());
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024;
 
   private int directBufferSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
index 685956c..f26ae84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
@@ -22,18 +22,18 @@ import java.io.IOException;
 import java.nio.Buffer;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Decompressor} based on the lz4 compression algorithm.
  * http://code.google.com/p/lz4/
  */
 public class Lz4Decompressor implements Decompressor {
-  private static final Log LOG =
-      LogFactory.getLog(Lz4Compressor.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Lz4Compressor.class.getName());
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024;
 
   private int directBufferSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
index 814718d..3d38680 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
@@ -22,19 +22,19 @@ import java.io.IOException;
 import java.nio.Buffer;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Compressor} based on the snappy compression algorithm.
  * http://code.google.com/p/snappy/
  */
 public class SnappyCompressor implements Compressor {
-  private static final Log LOG =
-      LogFactory.getLog(SnappyCompressor.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SnappyCompressor.class.getName());
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024;
 
   private int directBufferSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
index 8712431..f31b76c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
@@ -22,19 +22,19 @@ import java.io.IOException;
 import java.nio.Buffer;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DirectDecompressor;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Decompressor} based on the snappy compression algorithm.
  * http://code.google.com/p/snappy/
  */
 public class SnappyDecompressor implements Decompressor {
-  private static final Log LOG =
-      LogFactory.getLog(SnappyDecompressor.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SnappyDecompressor.class.getName());
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024;
 
   private int directBufferSize;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
index 509456e..739788f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
@@ -23,9 +23,8 @@ import java.util.zip.Deflater;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A wrapper around java.util.zip.Deflater to make it conform 
@@ -34,7 +33,8 @@ import org.apache.commons.logging.LogFactory;
  */
 public class BuiltInZlibDeflater extends Deflater implements Compressor {
 
-  private static final Log LOG = LogFactory.getLog(BuiltInZlibDeflater.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BuiltInZlibDeflater.class);
 
   public BuiltInZlibDeflater(int level, boolean nowrap) {
     super(level, nowrap);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
index 24d98a5..d7b153b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
@@ -25,9 +25,8 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.util.NativeCodeLoader;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Compressor} based on the popular 
@@ -37,7 +36,8 @@ import org.apache.commons.logging.LogFactory;
  */
 public class ZlibCompressor implements Compressor {
 
-  private static final Log LOG = LogFactory.getLog(ZlibCompressor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ZlibCompressor.class);
 
   private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
index 4112d27..a2bad42 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.io.compress.zlib;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
@@ -28,6 +26,8 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A collection of factories to create the right 
@@ -35,8 +35,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
  * 
  */
 public class ZlibFactory {
-  private static final Log LOG =
-    LogFactory.getLog(ZlibFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ZlibFactory.class);
 
   private static boolean nativeZlibLoaded = false;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
index ce93266..43d8299 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
@@ -30,8 +30,6 @@ import java.util.Arrays;
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -43,6 +41,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarComparator;
 import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarLong;
 import org.apache.hadoop.io.file.tfile.Compression.Algorithm;
 import org.apache.hadoop.io.file.tfile.Utils.Version;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Block Compressed file, the underlying physical storage layer for TFile.
@@ -54,7 +54,7 @@ final class BCFile {
   // the current version of BCFile impl, increment them (major or minor) made
   // enough changes
   static final Version API_VERSION = new Version((short) 1, (short) 0);
-  static final Log LOG = LogFactory.getLog(BCFile.class);
+  static final Logger LOG = LoggerFactory.getLogger(BCFile.class);
 
   /**
    * Prevent the instantiation of BCFile objects.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java
index f7ec7ac..2298dc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java
@@ -24,8 +24,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
@@ -35,6 +33,8 @@ import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY;
@@ -44,7 +44,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_
  * Compression related stuff.
  */
 final class Compression {
-  static final Log LOG = LogFactory.getLog(Compression.class);
+  static final Logger LOG = LoggerFactory.getLogger(Compression.class);
 
   /**
    * Prevent the instantiation of class.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
index 56739c6..c63baa5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
@@ -29,8 +29,6 @@ import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Comparator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -51,6 +49,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.BytesComparator;
 import org.apache.hadoop.io.file.tfile.CompareUtils.MemcmpRawComparator;
 import org.apache.hadoop.io.file.tfile.Utils.Version;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A TFile is a container of key-value pairs. Both keys and values are type-less
@@ -131,7 +131,7 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class TFile {
-  static final Log LOG = LogFactory.getLog(TFile.class);
+  static final Logger LOG = LoggerFactory.getLogger(TFile.class);
 
   private static final String CHUNK_BUF_SIZE_ATTR = "tfile.io.chunk.size";
   private static final String FS_INPUT_BUF_SIZE_ATTR =
@@ -335,7 +335,7 @@ public class TFile {
           writerBCF.close();
         }
       } finally {
-        IOUtils.cleanup(LOG, blkAppender, writerBCF);
+        IOUtils.cleanupWithLogger(LOG, blkAppender, writerBCF);
         blkAppender = null;
         writerBCF = null;
         state = State.CLOSED;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
index 84b92ec..3ef6b27 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
@@ -25,8 +25,6 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,12 +34,14 @@ import org.apache.hadoop.io.file.tfile.BCFile.BlockRegion;
 import org.apache.hadoop.io.file.tfile.BCFile.MetaIndexEntry;
 import org.apache.hadoop.io.file.tfile.TFile.TFileIndexEntry;
 import org.apache.hadoop.io.file.tfile.Utils.Version;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Dumping the information of a TFile.
  */
 class TFileDumper {
-  static final Log LOG = LogFactory.getLog(TFileDumper.class);
+  static final Logger LOG = LoggerFactory.getLogger(TFileDumper.class);
 
   private TFileDumper() {
     // namespace object not constructable.
@@ -290,7 +290,7 @@ class TFileDumper {
         }
       }
     } finally {
-      IOUtils.cleanup(LOG, reader, fsdis);
+      IOUtils.cleanupWithLogger(LOG, reader, fsdis);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index 7e9283e..d95efb6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -40,9 +40,9 @@ import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.PerformanceAdvisory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import sun.misc.Unsafe;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -98,7 +98,7 @@ public class NativeIO {
        write.  */
     public static int SYNC_FILE_RANGE_WAIT_AFTER = 4;
 
-    private static final Log LOG = LogFactory.getLog(NativeIO.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class);
 
     // Set to true via JNI if possible
     public static boolean fadvisePossible = false;
@@ -634,7 +634,7 @@ public class NativeIO {
     }
   }
 
-  private static final Log LOG = LogFactory.getLog(NativeIO.class);
+  private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class);
 
   private static boolean nativeLoaded = false;
 
@@ -940,10 +940,10 @@ public class NativeIO {
           position += transferred;
         }
       } finally {
-        IOUtils.cleanup(LOG, output);
-        IOUtils.cleanup(LOG, fos);
-        IOUtils.cleanup(LOG, input);
-        IOUtils.cleanup(LOG, fis);
+        IOUtils.cleanupWithLogger(LOG, output);
+        IOUtils.cleanupWithLogger(LOG, fos);
+        IOUtils.cleanupWithLogger(LOG, input);
+        IOUtils.cleanupWithLogger(LOG, fis);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java
index 306244a..4126344 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java
@@ -22,10 +22,10 @@ import java.io.IOException;
 import java.io.FileDescriptor;
 
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A factory for creating shared file descriptors inside a given directory.
@@ -45,7 +45,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class SharedFileDescriptorFactory {
-  public static final Log LOG = LogFactory.getLog(SharedFileDescriptorFactory.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SharedFileDescriptorFactory.class);
   private final String prefix;
   private final String path;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index d6f3e04..fa0cb6e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -32,8 +32,6 @@ import java.util.Map.Entry;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.StandbyException;
@@ -41,6 +39,8 @@ import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -49,7 +49,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class RetryPolicies {
   
-  public static final Log LOG = LogFactory.getLog(RetryPolicies.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RetryPolicies.class);
   
   /**
    * <p>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
index 15a9b54..1f5acfe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
@@ -19,17 +19,17 @@ package org.apache.hadoop.io.retry;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.retry.RetryPolicies.MultipleLinearRandomRetry;
 import org.apache.hadoop.ipc.RemoteException;
 
 import com.google.protobuf.ServiceException;
 import org.apache.hadoop.ipc.RetriableException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class RetryUtils {
-  public static final Log LOG = LogFactory.getLog(RetryUtils.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RetryUtils.class);
   
   /**
    * Return the default retry policy set in conf.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
index aa3c86a..a9787a0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.io.serializer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -31,6 +29,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
 import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -41,8 +41,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 @InterfaceStability.Evolving
 public class SerializationFactory extends Configured {
   
-  private static final Log LOG =
-    LogFactory.getLog(SerializationFactory.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SerializationFactory.class.getName());
 
   private List<Serialization<?>> serializations = new ArrayList<Serialization<?>>();
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
index 2764788..d1bd180 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
@@ -28,20 +28,21 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Abstracts queue operations for different blocking queues.
  */
 public class CallQueueManager<E extends Schedulable>
     extends AbstractQueue<E> implements BlockingQueue<E> {
-  public static final Log LOG = LogFactory.getLog(CallQueueManager.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CallQueueManager.class);
   // Number of checkpoints for empty queue.
   private static final int CHECKPOINT_NUM = 20;
   // Interval to check empty queue.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index 4164c7d..c225d99 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.ipc;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -57,6 +55,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.concurrent.AsyncGet;
 import org.apache.htrace.core.Span;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.SocketFactory;
 import javax.security.sasl.Sasl;
@@ -84,7 +84,7 @@ import static org.apache.hadoop.ipc.RpcConstants.PING_CALL_ID;
 @InterfaceStability.Evolving
 public class Client implements AutoCloseable {
   
-  public static final Log LOG = LogFactory.getLog(Client.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Client.class);
 
   /** A counter for generating call IDs. */
   private static final AtomicInteger callIdCounter = new AtomicInteger();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java
index 8bcaf05..20161b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java
@@ -33,11 +33,11 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A queue with multiple levels for each priority.
@@ -50,7 +50,7 @@ public class FairCallQueue<E extends Schedulable> extends AbstractQueue<E>
   public static final String IPC_CALLQUEUE_PRIORITY_LEVELS_KEY =
     "faircallqueue.priority-levels";
 
-  public static final Log LOG = LogFactory.getLog(FairCallQueue.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FairCallQueue.class);
 
   /* The queues */
   private final ArrayList<BlockingQueue<E>> queues;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
index 3c0aaba..190c550 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.ipc;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.*;
 import com.google.protobuf.Descriptors.MethodDescriptor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -39,6 +37,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.concurrent.AsyncGet;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.SocketFactory;
 import java.io.IOException;
@@ -55,7 +55,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 @InterfaceStability.Evolving
 public class ProtobufRpcEngine implements RpcEngine {
-  public static final Log LOG = LogFactory.getLog(ProtobufRpcEngine.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(ProtobufRpcEngine.class);
   private static final ThreadLocal<AsyncGet<Message, Exception>>
       ASYNC_RETURN_MESSAGE = new ThreadLocal<>();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
index 3f68d63..6d96eab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.net.SocketFactory;
 
-import org.apache.commons.logging.*;
-
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.*;
@@ -59,6 +57,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 
 import com.google.protobuf.BlockingService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** A simple RPC mechanism.
  *
@@ -109,7 +109,7 @@ public class RPC {
         Writable rpcRequest, long receiveTime) throws Exception ;
   }
   
-  static final Log LOG = LogFactory.getLog(RPC.class);
+  static final Logger LOG = LoggerFactory.getLogger(RPC.class);
   
   /**
    * Get all superInterfaces that extend VersionedProtocol

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
index ee84a04..e67e8d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
@@ -24,9 +24,9 @@ import com.google.common.base.Joiner;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Used to registry custom methods to refresh at runtime.
@@ -34,7 +34,8 @@ import org.apache.hadoop.classification.InterfaceStability;
  */
 @InterfaceStability.Unstable
 public class RefreshRegistry {
-  public static final Log LOG = LogFactory.getLog(RefreshRegistry.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RefreshRegistry.class);
 
   // Used to hold singleton instance
   private static class RegistryHolder {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
index 7b85286..6f6ceb5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
@@ -22,8 +22,6 @@ import java.util.Arrays;
 import java.util.UUID;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.metrics.RetryCacheMetrics;
 import org.apache.hadoop.util.LightWeightCache;
@@ -32,6 +30,8 @@ import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Maintains a cache of non-idempotent requests that have been successfully
@@ -44,7 +44,7 @@ import com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Private
 public class RetryCache {
-  public static final Log LOG = LogFactory.getLog(RetryCache.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RetryCache.class);
   private final RetryCacheMetrics retryCacheMetrics;
   private static final int MAX_CAPACITY = 16;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 85b8bfd..a2237bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -69,8 +69,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -124,6 +122,8 @@ import com.google.protobuf.ByteString;
 import com.google.protobuf.CodedOutputStream;
 import com.google.protobuf.Message;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** An abstract IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -292,9 +292,9 @@ public abstract class Server {
   }
   
 
-  public static final Log LOG = LogFactory.getLog(Server.class);
-  public static final Log AUDITLOG = 
-    LogFactory.getLog("SecurityLogger."+Server.class.getName());
+  public static final Logger LOG = LoggerFactory.getLogger(Server.class);
+  public static final Logger AUDITLOG =
+      LoggerFactory.getLogger("SecurityLogger."+Server.class.getName());
   private static final String AUTH_FAILED_FOR = "Auth failed for ";
   private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for ";
   
@@ -1112,7 +1112,7 @@ public abstract class Server {
           } catch (IOException ex) {
             LOG.error("Error in Reader", ex);
           } catch (Throwable re) {
-            LOG.fatal("Bug in read selector!", re);
+            LOG.error("Bug in read selector!", re);
             ExitUtil.terminate(1, "Bug in read selector!");
           }
         }
@@ -2620,7 +2620,7 @@ public abstract class Server {
           }
         } finally {
           CurCall.set(null);
-          IOUtils.cleanup(LOG, traceScope);
+          IOUtils.cleanupWithLogger(LOG, traceScope);
         }
       }
       LOG.debug(Thread.currentThread().getName() + ": exiting");
@@ -2629,7 +2629,7 @@ public abstract class Server {
   }
 
   @VisibleForTesting
-  void logException(Log logger, Throwable e, Call call) {
+  void logException(Logger logger, Throwable e, Call call) {
     if (exceptionsHandler.isSuppressedLog(e.getClass())) {
       return; // Log nothing.
     }

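The Server.java hunks above collect the cases where the swap is not purely mechanical: slf4j defines no FATAL level, so LOG.fatal(...) becomes LOG.error(...); IOUtils.cleanup(Log, ...) is replaced by IOUtils.cleanupWithLogger, which accepts an org.slf4j.Logger; and logException now takes the slf4j Logger type. A minimal sketch of the fatal-to-error pattern, assuming a hypothetical reader class:

  import org.apache.hadoop.util.ExitUtil;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ExampleReader {
    private static final Logger LOG = LoggerFactory.getLogger(ExampleReader.class);

    void onSelectorFailure(Throwable re) {
      // slf4j tops out at ERROR; the Throwable is still passed last so the
      // stack trace is preserved in the log output.
      LOG.error("Bug in read selector!", re);
      ExitUtil.terminate(1, "Bug in read selector!");
    }
  }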
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java
index cfda947..d308725 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.ipc;
 
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Determines which queue to start reading from, occasionally drawing from
@@ -43,8 +43,8 @@ public class WeightedRoundRobinMultiplexer implements RpcMultiplexer {
   public static final String IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY =
     "faircallqueue.multiplexer.weights";
 
-  public static final Log LOG =
-    LogFactory.getLog(WeightedRoundRobinMultiplexer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(WeightedRoundRobinMultiplexer.class);
 
   private final int numQueues; // The number of queues under our provisioning
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index df45d62..d122b0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -28,8 +28,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.net.SocketFactory;
 
-import org.apache.commons.logging.*;
-
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
@@ -43,11 +41,13 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** An RpcEngine implementation for Writable data. */
 @InterfaceStability.Evolving
 public class WritableRpcEngine implements RpcEngine {
-  private static final Log LOG = LogFactory.getLog(RPC.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RPC.class);
   
   //writableRpcVersion should be updated if there is a change
   //in format of the rpc messages.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java
index a853d64..fc09e0a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.ipc.metrics;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.ipc.RetryCache;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.annotation.Metric;
@@ -26,6 +24,8 @@ import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is for maintaining the various RetryCache-related statistics
@@ -35,7 +35,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 @Metrics(about="Aggregate RetryCache metrics", context="rpc")
 public class RetryCacheMetrics {
 
-  static final Log LOG = LogFactory.getLog(RetryCacheMetrics.class);
+  static final Logger LOG = LoggerFactory.getLogger(RetryCacheMetrics.class);
   final MetricsRegistry registry;
   final String name;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
index 8b7e995..6ed57ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.ipc.metrics;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is for maintaining RPC method related statistics
@@ -37,7 +37,7 @@ public class RpcDetailedMetrics {
   @Metric MutableRatesWithAggregation rates;
   @Metric MutableRatesWithAggregation deferredRpcRates;
 
-  static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class);
+  static final Logger LOG = LoggerFactory.getLogger(RpcDetailedMetrics.class);
   final MetricsRegistry registry;
   final String name;
 
@@ -45,7 +45,7 @@ public class RpcDetailedMetrics {
     name = "RpcDetailedActivityForPort"+ port;
     registry = new MetricsRegistry("rpcdetailed")
         .tag("port", "RPC port", String.valueOf(port));
-    LOG.debug(registry.info());
+    LOG.debug(registry.info().toString());
   }
 
   public String name() { return name; }

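The registry.info() change above is a consequence of the narrower slf4j signatures: commons-logging's debug(Object) stringified any argument itself, while slf4j's single-argument debug() takes a String, so the patch calls toString() explicitly. A parameterized message is an equally valid slf4j idiom and defers the toString() until DEBUG is actually enabled; a small sketch, with an illustrative class name and the MetricsRegistry usage taken from the hunk above:

  import org.apache.hadoop.metrics2.lib.MetricsRegistry;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ExampleMetrics {
    private static final Logger LOG = LoggerFactory.getLogger(ExampleMetrics.class);
    private final MetricsRegistry registry = new MetricsRegistry("rpcdetailed");

    void dumpRegistryInfo() {
      // What the patch does: build the String eagerly.
      LOG.debug(registry.info().toString());
      // slf4j alternative: the argument is only formatted when DEBUG is enabled.
      LOG.debug("{}", registry.info());
    }
  }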
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
index 8ce1379..d53d7d3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.ipc.metrics;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,6 +29,8 @@ import org.apache.hadoop.metrics2.lib.MutableCounterInt;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MutableQuantiles;
 import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is for maintaining  the various RPC statistics
@@ -40,7 +40,7 @@ import org.apache.hadoop.metrics2.lib.MutableRate;
 @Metrics(about="Aggregate RPC metrics", context="rpc")
 public class RpcMetrics {
 
-  static final Log LOG = LogFactory.getLog(RpcMetrics.class);
+  static final Logger LOG = LoggerFactory.getLogger(RpcMetrics.class);
   final Server server;
   final MetricsRegistry registry;
   final String name;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
index 6546c05..c8b67bd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
@@ -17,11 +17,11 @@
 
 package org.apache.hadoop.jmx;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.http.HttpServer2;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.management.AttributeNotFoundException;
 import javax.management.InstanceNotFoundException;
@@ -116,7 +116,8 @@ import java.util.Set;
  *
  */
 public class JMXJsonServlet extends HttpServlet {
-  private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JMXJsonServlet.class);
   static final String ACCESS_CONTROL_ALLOW_METHODS =
       "Access-Control-Allow-Methods";
   static final String ACCESS_CONTROL_ALLOW_ORIGIN =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java
index 14a3e33..3759df7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.metrics;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Utility class to simplify creation and reporting of hadoop metrics.
@@ -39,8 +39,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class MetricsUtil {
     
-  public static final Log LOG =
-    LogFactory.getLog(MetricsUtil.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MetricsUtil.class);
 
   /**
    * Don't allow creation of a new instance of Metrics

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
index 67414c7..c0a278f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
@@ -25,8 +25,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.io.Charsets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -34,6 +32,8 @@ import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
 import org.apache.hadoop.metrics.spi.OutputRecord;
 import org.apache.hadoop.metrics.spi.Util;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Context for sending metrics to Ganglia.
@@ -63,7 +63,7 @@ public class GangliaContext extends AbstractMetricsContext {
   private static final int BUFFER_SIZE = 1500;       // as per libgmond.c
   private static final int DEFAULT_MULTICAST_TTL = 1;
 
-  private final Log LOG = LogFactory.getLog(this.getClass());    
+  private final Logger LOG = LoggerFactory.getLogger(this.getClass());
 
   private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
     
@@ -126,7 +126,7 @@ public class GangliaContext extends AbstractMetricsContext {
         datagramSocket = new DatagramSocket();
       }
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString());
     }
   }
 

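The LOG.error(e) to LOG.error(e.toString()) change here (repeated in GangliaContext31 and in several tests later in the patch) has the same cause: commons-logging offered error(Object), so passing the exception alone compiled, whereas slf4j's one-argument error() takes a String. Note that e.toString() records only the exception class and message; keeping the stack trace would mean passing the Throwable as a second argument, which this patch does not do at these call sites. A hedged sketch of both variants, using an illustrative class name:

  import java.io.IOException;
  import java.net.DatagramSocket;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ExampleContext {
    private static final Logger LOG = LoggerFactory.getLogger(ExampleContext.class);
    private DatagramSocket datagramSocket;

    void openSocket() {
      try {
        datagramSocket = new DatagramSocket();
      } catch (IOException e) {
        // As in the patch: class name and message only, no stack trace.
        LOG.error(e.toString());
        // Alternative that would keep the stack trace:
        // LOG.error("Failed to create datagram socket", e);
      }
    }
  }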
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
index 0cfd31d..6e803a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
@@ -23,11 +23,11 @@ import java.net.DatagramPacket;
 import java.net.SocketAddress;
 import java.net.UnknownHostException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.net.DNS;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Context for sending metrics to Ganglia version 3.1.x.
@@ -42,8 +42,8 @@ public class GangliaContext31 extends GangliaContext {
 
   String hostName = "UNKNOWN.example.com";
 
-  private static final Log LOG = 
-    LogFactory.getLog("org.apache.hadoop.util.GangliaContext31");
+  private static final Logger LOG =
+      LoggerFactory.getLogger("org.apache.hadoop.util.GangliaContext31");
 
   public void init(String contextName, ContextFactory factory) {
     super.init(contextName, factory);
@@ -62,7 +62,7 @@ public class GangliaContext31 extends GangliaContext {
           conf.get("dfs.datanode.dns.interface","default"),
           conf.get("dfs.datanode.dns.nameserver","default"));
       } catch (UnknownHostException uhe) {
-        LOG.error(uhe);
+        LOG.error(uhe.toString());
     	hostName = "UNKNOWN.example.com";
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
index ed9d3c9..1b1b1ea 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
@@ -29,12 +29,12 @@ import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static java.lang.Thread.State.*;
 import java.lang.management.GarbageCollectorMXBean;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 /**
  * Singleton class which reports Java Virtual Machine metrics to the metrics API.  
@@ -50,7 +50,7 @@ public class JvmMetrics implements Updater {
     
     private static final float M = 1024*1024;
     private static JvmMetrics theInstance = null;
-    private static Log log = LogFactory.getLog(JvmMetrics.class);
+    private static Logger log = LoggerFactory.getLogger(JvmMetrics.class);
     
     private MetricsRecord metrics;
     

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
index f9d3442..b04952e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
@@ -23,9 +23,6 @@ import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
@@ -33,6 +30,8 @@ import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * @deprecated Use org.apache.hadoop.metrics2 package instead.
@@ -42,7 +41,8 @@ import org.apache.hadoop.metrics.Updater;
 @InterfaceStability.Evolving
 public class CompositeContext extends AbstractMetricsContext {
 
-  private static final Log LOG = LogFactory.getLog(CompositeContext.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CompositeContext.class);
   private static final String ARITY_LABEL = "arity";
   private static final String SUB_FMT = "%s.sub%d";
   private final ArrayList<MetricsContext> subctxt =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
index 2199346..435ffff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.metrics.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The MetricsIntValue class is for a metric that is not time varied
@@ -34,8 +34,8 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsIntValue extends MetricsBase {  
 
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
+  private static final Logger LOG =
+      LoggerFactory.getLogger("org.apache.hadoop.metrics.util");
 
   private int value;
   private boolean changed;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
index 30a5f61..9661245 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.metrics.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The MetricsTimeVaryingInt class is for a metric that naturally
@@ -37,8 +37,8 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingInt extends MetricsBase {
 
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
+  private static final Logger LOG =
+      LoggerFactory.getLogger("org.apache.hadoop.metrics.util");
   
   private int currentValue;
   private int previousIntervalValue;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
index ad2fdf6..666a1a1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.metrics.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The MetricsTimeVaryingLong class is for a metric that naturally
@@ -38,8 +38,8 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingLong extends MetricsBase{
 
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
+  private static final Logger LOG =
+      LoggerFactory.getLogger("org.apache.hadoop.metrics.util");
  
   private long currentValue;
   private long previousIntervalValue;




[2/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.

Posted by aa...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
index 92fb4ec..57359a0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
@@ -20,15 +20,16 @@ package org.apache.hadoop.io;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 public class TestWritableUtils {
-  private static final Log LOG = LogFactory.getLog(TestWritableUtils.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestWritableUtils.class);
 
   private void testValue(int val, int vintlen) throws IOException {
     DataOutputBuffer buf = new DataOutputBuffer();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
index 9d16a0d..4b99def 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
@@ -68,18 +68,19 @@ import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.*;
 import static org.junit.Assume.assumeTrue;
 
 public class TestCodec {
 
-  private static final Log LOG= LogFactory.getLog(TestCodec.class);
+  private static final Logger LOG= LoggerFactory.getLogger(TestCodec.class);
 
   private Configuration conf = new Configuration();
   private int count = 10000;
@@ -373,7 +374,7 @@ public class TestCodec {
       }
       LOG.info("Wrote " + seq + " records to " + file);
     } finally {
-      IOUtils.cleanup(LOG, fout);
+      IOUtils.cleanupWithLogger(LOG, fout);
       CodecPool.returnCompressor(cmp);
     }
     return file;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
index 7b55cac..645080b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
@@ -24,8 +24,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -36,12 +34,15 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assume.assumeTrue;
 
 public class TestCompressionStreamReuse {
-  private static final Log LOG = LogFactory
-      .getLog(TestCompressionStreamReuse.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestCompressionStreamReuse.class);
 
   private Configuration conf = new Configuration();
   private int count = 10000;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index 0f2c640..a3901a7 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -44,8 +44,6 @@ import static org.junit.Assume.*;
 import static org.junit.Assert.*;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,12 +52,14 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.*;
 import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat.*;
 
 public class TestNativeIO {
-  static final Log LOG = LogFactory.getLog(TestNativeIO.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestNativeIO.class);
 
   static final File TEST_DIR = GenericTestUtils.getTestDir("testnativeio");
 
@@ -641,8 +641,8 @@ public class TestNativeIO {
       NativeIO.copyFileUnbuffered(srcFile, dstFile);
       Assert.assertEquals(srcFile.length(), dstFile.length());
     } finally {
-      IOUtils.cleanup(LOG, channel);
-      IOUtils.cleanup(LOG, raSrcFile);
+      IOUtils.cleanupWithLogger(LOG, channel);
+      IOUtils.cleanupWithLogger(LOG, raSrcFile);
       FileUtils.deleteQuietly(TEST_DIR);
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java
index 64abecd..fbe3fb8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java
@@ -27,14 +27,15 @@ import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestSharedFileDescriptorFactory {
-  static final Log LOG = LogFactory.getLog(TestSharedFileDescriptorFactory.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(TestSharedFileDescriptorFactory.class);
 
   private static final File TEST_BASE = GenericTestUtils.getTestDir();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java
index 8fc852a..2fe1271 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.LongWritable;
@@ -34,6 +32,8 @@ import org.apache.hadoop.util.concurrent.AsyncGetFuture;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -49,7 +49,7 @@ import static org.junit.Assert.assertFalse;
 public class TestAsyncIPC {
 
   private static Configuration conf;
-  private static final Log LOG = LogFactory.getLog(TestAsyncIPC.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestAsyncIPC.class);
 
   static <T extends Writable> AsyncGetFuture<T, IOException>
       getAsyncRpcResponseFuture() {
@@ -185,7 +185,7 @@ public class TestAsyncIPC {
           final long param = TestIPC.RANDOM.nextLong();
           runCall(i, param);
         } catch (Exception e) {
-          LOG.fatal(String.format("Caller-%d Call-%d caught: %s", callerId, i,
+          LOG.error(String.format("Caller-%d Call-%d caught: %s", callerId, i,
               StringUtils.stringifyException(e)));
           failed = true;
         }
@@ -222,7 +222,7 @@ public class TestAsyncIPC {
       for (int i = start; i < end; i++) {
         LongWritable value = returnFutures.get(i).get();
         if (expectedValues.get(i) != value.get()) {
-          LOG.fatal(String.format("Caller-%d Call-%d failed!", callerId, i));
+          LOG.error(String.format("Caller-%d Call-%d failed!", callerId, i));
           failed = true;
           break;
         }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index 63af6fc..2dce047 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -58,8 +58,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.net.SocketFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -96,12 +94,13 @@ import org.mockito.stubbing.Answer;
 import com.google.common.base.Supplier;
 import com.google.common.primitives.Bytes;
 import com.google.common.primitives.Ints;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
 /** Unit tests for IPC. */
 public class TestIPC {
-  public static final Log LOG =
-    LogFactory.getLog(TestIPC.class);
+  public static final Logger LOG = LoggerFactory.getLogger(TestIPC.class);
   
   private static Configuration conf;
   final static int PING_INTERVAL = 1000;
@@ -230,12 +229,12 @@ public class TestIPC {
           final long param = RANDOM.nextLong();
           LongWritable value = call(client, param, server, conf);
           if (value.get() != param) {
-            LOG.fatal("Call failed!");
+            LOG.error("Call failed!");
             failed = true;
             break;
           }
         } catch (Exception e) {
-          LOG.fatal("Caught: " + StringUtils.stringifyException(e));
+          LOG.error("Caught: " + StringUtils.stringifyException(e));
           failed = true;
         }
       }
@@ -784,7 +783,7 @@ public class TestIPC {
             call(client, new LongWritable(Thread.currentThread().getId()),
                 addr, 60000, conf);
           } catch (Throwable e) {
-            LOG.error(e);
+            LOG.error(e.toString());
             failures.incrementAndGet();
             return;
           } finally {
@@ -895,7 +894,7 @@ public class TestIPC {
               callBarrier.await();
             }
           } catch (Throwable t) {
-            LOG.error(t);
+            LOG.error(t.toString());
             error.set(true); 
           } 
         }
@@ -917,7 +916,7 @@ public class TestIPC {
               callReturned.countDown();
               Thread.sleep(10000);
             } catch (IOException e) {
-              LOG.error(e);
+              LOG.error(e.toString());
             } catch (InterruptedException e) {
             } finally {
               client.stop();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
index 546cb8f..ea55f33 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
@@ -32,8 +32,6 @@ import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
@@ -45,6 +43,8 @@ import org.apache.hadoop.ipc.Server.Call;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This test provokes partial writes in the server, which is 
@@ -52,8 +52,8 @@ import org.junit.Test;
  */
 public class TestIPCServerResponder {
 
-  public static final Log LOG = 
-            LogFactory.getLog(TestIPCServerResponder.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestIPCServerResponder.class);
 
   private static Configuration conf = new Configuration();
 
@@ -126,7 +126,7 @@ public class TestIPCServerResponder {
           call(client, param, address);
           Thread.sleep(RANDOM.nextInt(20));
         } catch (Exception e) {
-          LOG.fatal("Caught Exception", e);
+          LOG.error("Caught Exception", e);
           failed = true;
         }
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java
index f5fefe4..476b197 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java
@@ -29,18 +29,18 @@ import java.util.concurrent.Future;
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.protobuf.TestProtos;
 import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcHandoffProto;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestProtoBufRpcServerHandoff {
 
-  public static final Log LOG =
-      LogFactory.getLog(TestProtoBufRpcServerHandoff.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestProtoBufRpcServerHandoff.class);
 
   @Test(timeout = 20000)
   public void test() throws Exception {
@@ -83,8 +83,8 @@ public class TestProtoBufRpcServerHandoff {
     ClientInvocationCallable callable1 = future1.get();
     ClientInvocationCallable callable2 = future2.get();
 
-    LOG.info(callable1);
-    LOG.info(callable2);
+    LOG.info(callable1.toString());
+    LOG.info(callable2.toString());
 
     // Ensure the 5 second sleep responses are within a reasonable time of each
     // other.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
index 9ca662d..d08c4b4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.ipc;
 
 import com.google.common.base.Supplier;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -54,6 +52,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
 import javax.net.SocketFactory;
@@ -104,7 +104,7 @@ import static org.mockito.Mockito.verify;
 @SuppressWarnings("deprecation")
 public class TestRPC extends TestRpcBase {
 
-  public static final Log LOG = LogFactory.getLog(TestRPC.class);
+  public static final Logger LOG = LoggerFactory.getLogger(TestRPC.class);
 
   @Before
   public void setup() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
index 2ac2be9..2984efd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
@@ -28,8 +28,6 @@ import java.net.InetSocketAddress;
 
 import org.junit.Assert;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
@@ -39,6 +37,8 @@ import org.apache.hadoop.net.NetUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Unit test for supporting method-name based compatible RPCs. */
 public class TestRPCCompatibility {
@@ -47,8 +47,8 @@ public class TestRPCCompatibility {
   private static RPC.Server server;
   private ProtocolProxy<?> proxy;
 
-  public static final Log LOG =
-    LogFactory.getLog(TestRPCCompatibility.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestRPCCompatibility.class);
   
   private static Configuration conf = new Configuration();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java
index 93af7d4..aee8893 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java
@@ -19,11 +19,11 @@
 package org.apache.hadoop.ipc;
 
 import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -42,7 +42,8 @@ import static org.junit.Assert.fail;
 @SuppressWarnings("deprecation")
 public class TestRPCServerShutdown extends TestRpcBase {
 
-  public static final Log LOG = LogFactory.getLog(TestRPCServerShutdown.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestRPCServerShutdown.class);
 
   @Before
   public void setup() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java
index 3716bc3..2e0b3da 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java
@@ -30,19 +30,19 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestRpcServerHandoff {
 
-  public static final Log LOG =
-      LogFactory.getLog(TestRpcServerHandoff.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestRpcServerHandoff.class);
 
   private static final String BIND_ADDRESS = "0.0.0.0";
   private static final Configuration conf = new Configuration();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 87210e8..8e9aa5c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.ipc;
 
 import com.google.protobuf.ServiceException;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -43,6 +41,8 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
 import javax.security.auth.callback.*;
@@ -95,8 +95,7 @@ public class TestSaslRPC extends TestRpcBase {
     this.saslPropertiesResolver = saslPropertiesResolver;
   }
 
-  public static final Log LOG =
-    LogFactory.getLog(TestSaslRPC.class);
+  public static final Logger LOG = LoggerFactory.getLogger(TestSaslRPC.class);
   
   static final String ERROR_MESSAGE = "Token is invalid";
   static final String SERVER_KEYTAB_KEY = "test.ipc.server.keytab";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
index afda535..3141527 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
@@ -27,12 +27,12 @@ import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.Server.Call;
 import org.junit.Test;
+import org.slf4j.Logger;
 
 /**
  * This is intended to be a set of unit tests for the 
@@ -136,7 +136,7 @@ public class TestServer {
   public void testLogExceptions() throws Exception {
     final Configuration conf = new Configuration();
     final Call dummyCall = new Call(0, 0, null, null);
-    Log logger = mock(Log.class);
+    Logger logger = mock(Logger.class);
     Server server = new Server("0.0.0.0", 0, LongWritable.class, 1, conf) {
       @Override
       public Writable call(
@@ -154,12 +154,12 @@ public class TestServer {
 
     // No stack trace should be logged for a terse exception.
     server.logException(logger, new TestException2(), dummyCall);
-    verify(logger, times(1)).info(anyObject());
+    verify(logger, times(1)).info(anyString());
 
     // Full stack trace should be logged for other exceptions.
     final Throwable te3 = new TestException3();
     server.logException(logger, te3, dummyCall);
-    verify(logger, times(1)).info(anyObject(), eq(te3));
+    verify(logger, times(1)).info(anyString(), eq(te3));
   }
 
   @Test

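Because slf4j's info() overloads take a String (optionally followed by a Throwable) rather than an Object, the Mockito matchers above move from anyObject() to anyString(). A self-contained sketch of verifying calls on a mocked slf4j Logger, assuming Mockito and slf4j are on the classpath (class and messages are illustrative):

    import static org.mockito.Mockito.*;
    import org.slf4j.Logger;

    public class LoggerMockSketch {   // illustrative, not part of the patch
      public static void main(String[] args) {
        Logger logger = mock(Logger.class);
        logger.info("terse");                          // message only
        logger.info("full", new Exception("boom"));    // message plus Throwable
        // slf4j methods take String, so anyString() replaces commons-logging's anyObject()
        verify(logger, times(1)).info(anyString());
        verify(logger, times(1)).info(anyString(), any(Throwable.class));
      }
    }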
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
index 6428176..d4bc06a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
@@ -21,14 +21,15 @@ package org.apache.hadoop.ipc;
 import static org.junit.Assert.assertEquals;
 import org.junit.Test;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.ipc.WeightedRoundRobinMultiplexer.IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY;
 
 public class TestWeightedRoundRobinMultiplexer {
-  public static final Log LOG = LogFactory.getLog(TestWeightedRoundRobinMultiplexer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestWeightedRoundRobinMultiplexer.class);
 
   private WeightedRoundRobinMultiplexer mux;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
index 7e094ed..7bc772f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
@@ -31,8 +31,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
@@ -47,9 +45,12 @@ import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30;
 import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31;
 import org.apache.hadoop.metrics2.sink.ganglia.GangliaMetricsTestHelper;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestGangliaMetrics {
-  public static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestMetricsSystemImpl.class);
   // This is the prefix to locate the config file for this particular test
   // This is to avoid using the same config file with other test cases,
   // which can cause race conditions.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
index 3bdc8af..18b8309 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
@@ -24,15 +24,16 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 import org.apache.commons.configuration.Configuration;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.apache.hadoop.metrics2.impl.ConfigUtil.*;
 
 /**
  * Test metrics configuration
  */
 public class TestMetricsConfig {
-  static final Log LOG = LogFactory.getLog(TestMetricsConfig.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestMetricsConfig.class);
 
   /**
    * Common use cases

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
index 0463f48..5cc7697 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
@@ -42,8 +42,6 @@ import com.google.common.base.Predicate;
 import com.google.common.collect.Iterables;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsException;
 import static org.apache.hadoop.test.MoreAsserts.*;
 
@@ -61,13 +59,16 @@ import org.apache.hadoop.metrics2.lib.MutableRate;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Test the MetricsSystemImpl class
  */
 @RunWith(MockitoJUnitRunner.class)
 public class TestMetricsSystemImpl {
-  private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestMetricsSystemImpl.class);
   
   static { DefaultMetricsSystem.setMiniClusterMode(true); }
   
@@ -78,7 +79,7 @@ public class TestMetricsSystemImpl {
   public static class TestSink implements MetricsSink {
 
     @Override public void putMetrics(MetricsRecord record) {
-      LOG.debug(record);
+      LOG.debug(record.toString());
     }
 
     @Override public void flush() {}
@@ -420,7 +421,7 @@ public class TestMetricsSystemImpl {
   }
 
   private void checkMetricsRecords(List<MetricsRecord> recs) {
-    LOG.debug(recs);
+    LOG.debug(recs.toString());
     MetricsRecord r = recs.get(0);
     assertEquals("name", "s1rec", r.name());
     assertEquals("tags", new MetricsTag[] {

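As elsewhere in the patch, Object arguments to debug() gain an explicit toString() because slf4j takes String messages. Where the conversion is costly, the parameterized form or an isDebugEnabled() guard defers the work; a brief sketch (record is the variable from the test above):

    LOG.debug(record.toString());   // as in the patch: always builds the String
    LOG.debug("{}", record);        // parameterized: toString() runs only if DEBUG is enabled
    if (LOG.isDebugEnabled()) {     // explicit guard, equivalent effect
      LOG.debug(record.toString());
    }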
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java
index 7da8d1b..719130f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java
@@ -22,18 +22,20 @@ import java.util.ConcurrentModificationException;
 import java.util.concurrent.CountDownLatch;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import static org.apache.hadoop.metrics2.impl.SinkQueue.*;
 
 /**
  * Test the half-blocking metrics sink queue
  */
 public class TestSinkQueue {
-  private static final Log LOG = LogFactory.getLog(TestSinkQueue.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestSinkQueue.class);
 
   /**
    * Test common use case
@@ -234,7 +236,7 @@ public class TestSinkQueue {
       callback.run();
     }
     catch (ConcurrentModificationException e) {
-      LOG.info(e);
+      LOG.info(e.toString());
       return;
     }
     LOG.error("should've thrown CME");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java
index fd716ae..b0d7deb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java
@@ -34,18 +34,19 @@ import java.util.Map.Entry;
 import java.util.Random;
 import java.util.concurrent.CountDownLatch;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.util.Quantile;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Test metrics record builder interface and mutable metrics
  */
 public class TestMutableMetrics {
 
-  private static final Log LOG = LogFactory.getLog(TestMutableMetrics.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestMutableMetrics.class);
   private final double EPSILON = 1e-42;
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
index 7bee3a2..e69947e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
@@ -25,15 +25,17 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.apache.hadoop.metrics2.lib.Interns.*;
 
 public class TestMetricsCache {
-  private static final Log LOG = LogFactory.getLog(TestMetricsCache.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestMetricsCache.class);
 
   @SuppressWarnings("deprecation")
   @Test public void testUpdate() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
index a294e74..80f2ebc 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
@@ -18,16 +18,17 @@
 
 package org.apache.hadoop.net;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.net.ServerSocket;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 public class ServerSocketUtil {
 
-  private static final Log LOG = LogFactory.getLog(ServerSocketUtil.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ServerSocketUtil.class);
   private static Random rand = new Random();
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
index a0bfe73..663198a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
@@ -28,12 +28,12 @@ import java.net.InetAddress;
 import javax.naming.CommunicationException;
 import javax.naming.NameNotFoundException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.core.Is.is;
@@ -45,7 +45,7 @@ import static org.junit.Assume.assumeTrue;
  */
 public class TestDNS {
 
-  private static final Log LOG = LogFactory.getLog(TestDNS.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDNS.class);
   private static final String DEFAULT = "default";
 
   // This is not a legal hostname (starts with a hyphen). It will never

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
index e59ac77..1c56c60 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
@@ -40,8 +40,6 @@ import java.util.concurrent.TimeUnit;
 import junit.framework.AssertionFailedError;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.NetUtilsTestResolver;
@@ -49,10 +47,12 @@ import org.junit.Assume;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestNetUtils {
 
-  private static final Log LOG = LogFactory.getLog(TestNetUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestNetUtils.class);
   private static final int DEST_PORT = 4040;
   private static final String DEST_PORT_NAME = Integer.toString(DEST_PORT);
   private static final int LOCAL_PORT = 8080;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
index 649ba12..f1c03cf 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
@@ -25,8 +25,6 @@ import java.net.SocketTimeoutException;
 import java.nio.channels.Pipe;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.MultithreadedTestUtil;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
@@ -36,6 +34,9 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.io.nativeio.NativeIO;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.*;
 
 /**
@@ -47,7 +48,8 @@ import static org.junit.Assert.*;
  */
 public class TestSocketIOWithTimeout {
 
-  static Log LOG = LogFactory.getLog(TestSocketIOWithTimeout.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(TestSocketIOWithTimeout.class);
   
   private static int TIMEOUT = 1*1000; 
   private static String TEST_STRING = "1234567890";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java
index f3c0a5c..a906c4a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.net;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -34,7 +34,8 @@ import java.util.Map;
  * Because the map is actually static, this map needs to be reset for every test
  */
 public class TestStaticMapping extends Assert {
-  private static final Log LOG = LogFactory.getLog(TestStaticMapping.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestStaticMapping.class);
 
   /**
    * Reset the map then create a new instance of the {@link StaticMapping}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
index 8a5a0a4..28b3cbe 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
@@ -203,7 +203,7 @@ public class TestDomainSocket {
         } catch (IOException e) {
           throw new RuntimeException("unexpected IOException", e);
         } finally {
-          IOUtils.cleanup(DomainSocket.LOG, serverConn);
+          IOUtils.cleanupWithLogger(DomainSocket.LOG, serverConn);
         }
         return null;
       }

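Hadoop's IOUtils.cleanup(Log, Closeable...) is tied to the commons-logging Log type, so callers that now hold an slf4j Logger switch to cleanupWithLogger. A hedged sketch of the call pattern (class and stream variable are illustrative):

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CleanupSketch {   // illustrative, not part of the patch
      private static final Logger LOG = LoggerFactory.getLogger(CleanupSketch.class);

      static void closeQuietly(Closeable stream) {
        // cleanupWithLogger takes an slf4j Logger; the older cleanup() expects commons-logging's Log
        IOUtils.cleanupWithLogger(LOG, stream);
      }
    }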
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java
index 4cc86a7..aa522f2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java
@@ -26,17 +26,18 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
 import com.google.common.util.concurrent.Uninterruptibles;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestDomainSocketWatcher {
-  static final Log LOG = LogFactory.getLog(TestDomainSocketWatcher.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(TestDomainSocketWatcher.class);
 
   private Throwable trappedException = null;
 
@@ -141,7 +142,7 @@ public class TestDomainSocketWatcher {
             }
           }
         } catch (Throwable e) {
-          LOG.error(e);
+          LOG.error(e.toString());
           throw new RuntimeException(e);
         }
       }
@@ -169,7 +170,7 @@ public class TestDomainSocketWatcher {
             }
           }
         } catch (Throwable e) {
-          LOG.error(e);
+          LOG.error(e.toString());
           throw new RuntimeException(e);
         }
       }
@@ -212,7 +213,7 @@ public class TestDomainSocketWatcher {
             TimeUnit.MILLISECONDS.sleep(1);
           }
         } catch (Throwable e) {
-          LOG.error(e);
+          LOG.error(e.toString());
           throw new RuntimeException(e);
         }
       }
@@ -241,7 +242,7 @@ public class TestDomainSocketWatcher {
             }
           }
         } catch (Throwable e) {
-          LOG.error(e);
+          LOG.error(e.toString());
           throw new RuntimeException(e);
         }
       }

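commons-logging's error(Object) typically rendered a Throwable argument via its toString(), so the explicit e.toString() above preserves the old output. Where the stack trace matters, slf4j records it when the Throwable is passed as the last argument; a brief sketch (the message text is illustrative):

    LOG.error(e.toString());                  // preserves the old behavior: message text only
    LOG.error("Watcher thread failed", e);    // alternative form that also records the stack trace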
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java
index 79f56e0..0a2d42c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java
@@ -24,16 +24,17 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class TestCompositeGroupMapping {
-  public static final Log LOG = LogFactory.getLog(TestCompositeGroupMapping.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestCompositeGroupMapping.class);
   private static Configuration conf = new Configuration();
   
   private static class TestUser {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
index 50d389c..8cfd9fc 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
@@ -44,8 +44,9 @@ import org.junit.Test;
 import org.apache.hadoop.ipc.TestRpcBase.TestTokenSecretManager;
 import org.apache.hadoop.ipc.TestRpcBase.TestTokenIdentifier;
 import org.apache.hadoop.ipc.TestRpcBase.TestTokenSelector;
-import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -63,8 +64,8 @@ public class TestDoAsEffectiveUser {
   private static final Configuration masterConf = new Configuration();
   
   
-  public static final Log LOG = LogFactory
-      .getLog(TestDoAsEffectiveUser.class);
+  public static final Logger LOG = LoggerFactory
+      .getLogger(TestDoAsEffectiveUser.class);
   
   
   static {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java
index 85f17b1..3ef3698 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java
@@ -21,16 +21,17 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
 public class TestGroupFallback {
-  public static final Log LOG = LogFactory.getLog(TestGroupFallback.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestGroupFallback.class);
 
   @Test
   public void testGroupShell() throws Exception {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
index d51d63b..f015021 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
@@ -40,17 +40,17 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class TestGroupsCaching {
-  public static final Log TESTLOG = LogFactory.getLog(TestGroupsCaching.class);
+  public static final Logger TESTLOG =
+      LoggerFactory.getLogger(TestGroupsCaching.class);
   private static String[] myGroups = {"grp1", "grp2"};
   private Configuration conf;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java
index 6d9ea08..d3c9538 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.security;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -30,6 +28,8 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.doNothing;
@@ -38,8 +38,8 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 public class TestShellBasedUnixGroupsMapping {
-  private static final Log TESTLOG =
-      LogFactory.getLog(TestShellBasedUnixGroupsMapping.class);
+  private static final Logger TESTLOG =
+      LoggerFactory.getLogger(TestShellBasedUnixGroupsMapping.class);
 
   private final GenericTestUtils.LogCapturer shellMappingLog =
       GenericTestUtils.LogCapturer.captureLogs(

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
index 2c866ae..ee7e42c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
@@ -23,8 +23,6 @@ import java.net.URI;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,13 +37,16 @@ import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 public class TestCredentialProviderFactory {
-  public static final Log LOG = LogFactory.getLog(TestCredentialProviderFactory.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestCredentialProviderFactory.class);
 
   @Rule
   public final TestName test = new TestName();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
index 04efc33..fe00949 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
@@ -26,8 +26,6 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +34,8 @@ import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
@@ -45,8 +45,8 @@ import static org.mockito.Mockito.verify;
 @InterfaceStability.Evolving
 public class TestAccessControlList {
 
-  private static final Log LOG =
-    LogFactory.getLog(TestAccessControlList.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestAccessControlList.class);
 
   /**
    * Test the netgroups (groups in ACL rules that start with @)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
index 577f11b..9061fe7 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
@@ -25,8 +25,6 @@ import java.security.SecureRandom;
 import java.util.Arrays;
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.Groups;
@@ -34,11 +32,13 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class TestProxyUsers {
-  private static final Log LOG =
-    LogFactory.getLog(TestProxyUsers.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestProxyUsers.class);
   private static final String REAL_USER_NAME = "proxier";
   private static final String PROXY_USER_NAME = "proxied_user";
   private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
index b41ff15..ad12f0b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
@@ -32,8 +32,6 @@ import java.util.Map;
 
 import org.junit.Assert;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Text;
@@ -49,11 +47,14 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecret
 import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.Time;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.*;
 
 public class TestDelegationToken {
-  private static final Log LOG = LogFactory.getLog(TestDelegationToken.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestDelegationToken.class);
   private static final Text KIND = new Text("MY KIND");
 
   public static class TestDelegationTokenIdentifier 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
index 6189c0e..ad3dfcf 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.service;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.Service.STATE;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -36,7 +36,8 @@ public class TestCompositeService {
 
   private static final int FAILED_SERVICE_SEQ_NUMBER = 2;
 
-  private static final Log LOG  = LogFactory.getLog(TestCompositeService.class);
+  private static final Logger LOG  =
+      LoggerFactory.getLogger(TestCompositeService.class);
 
   /**
    * flag to state policy of CompositeService, and hence

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java
index cf9ca32..f72e130 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java
@@ -19,8 +19,6 @@
 
 package org.apache.hadoop.service;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.LoggingStateChangeListener;
@@ -28,9 +26,12 @@ import org.apache.hadoop.service.Service;
 import org.apache.hadoop.service.ServiceStateChangeListener;
 import org.apache.hadoop.service.ServiceStateException;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestServiceLifecycle extends ServiceAssert {
-  private static Log LOG = LogFactory.getLog(TestServiceLifecycle.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestServiceLifecycle.class);
 
   /**
    * Walk the {@link BreakableService} through it's lifecycle, 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
index d84c9c8..21d38be 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
@@ -32,8 +32,6 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.ArgumentCaptor;
 import org.mockito.ArgumentMatcher;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsSource;
@@ -42,6 +40,8 @@ import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MutableQuantiles;
 import org.apache.hadoop.metrics2.util.Quantile;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.metrics2.lib.Interns.*;
 
@@ -50,7 +50,7 @@ import static org.apache.hadoop.metrics2.lib.Interns.*;
  */
 public class MetricsAsserts {
 
-  final static Log LOG = LogFactory.getLog(MetricsAsserts.class);
+  final static Logger LOG = LoggerFactory.getLogger(MetricsAsserts.class);
   private static final double EPSILON = 0.00001;
 
   public static MetricsSystem mockMetricsSystem() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
index e0bc136..514e9c0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.test;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A utility to easily test threaded/synchronized code.
@@ -60,8 +60,8 @@ import org.apache.hadoop.util.Time;
  */
 public abstract class MultithreadedTestUtil {
 
-  public static final Log LOG =
-    LogFactory.getLog(MultithreadedTestUtil.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(MultithreadedTestUtil.class);
 
   /**
    * TestContext is used to setup the multithreaded test runner.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java
index d6ae04d..4c0b965 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.test;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestJUnitSetup {
-  public static final Log LOG = LogFactory.getLog(TestJUnitSetup.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestJUnitSetup.class);
 
   @Test
   public void testJavaAssert() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
index 075ef69..58935f2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
@@ -19,17 +19,18 @@ package org.apache.hadoop.util;
 
 import junit.framework.TestCase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.AsyncDiskService;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A test for AsyncDiskService.
  */
 public class TestAsyncDiskService extends TestCase {
   
-  public static final Log LOG = LogFactory.getLog(TestAsyncDiskService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestAsyncDiskService.class);
   
   // Access by multiple threads from the ThreadPools in AsyncDiskService.
   volatile int count;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
index a38c3d7..e6a6cb1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
@@ -28,21 +28,22 @@ import java.util.jar.Attributes;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Tests covering the classpath command-line utility.
  */
 public class TestClasspath {
 
-  private static final Log LOG = LogFactory.getLog(TestClasspath.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestClasspath.class);
   private static final File TEST_DIR = GenericTestUtils.getTestDir(
       "TestClasspath");
   private static final Charset UTF8 = Charset.forName("UTF-8");
@@ -75,7 +76,7 @@ public class TestClasspath {
   public void tearDown() {
     System.setOut(oldStdout);
     System.setErr(oldStderr);
-    IOUtils.cleanup(LOG, printStdout, printStderr);
+    IOUtils.cleanupWithLogger(LOG, printStdout, printStderr);
     assertTrue(FileUtil.fullyDelete(TEST_DIR));
   }
 

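Besides the logger declaration, the TestClasspath hunk switches IOUtils.cleanup(LOG, ...) to IOUtils.cleanupWithLogger(LOG, ...), since the older overload takes a commons-logging Log rather than an SLF4J Logger. A hedged usage sketch, with a hypothetical CleanupSketch class and stream (the varargs Closeable call itself is the one used in the diff):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CleanupSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(CleanupSketch.class);

      public void closeQuietly() {
        PrintStream out = new PrintStream(new ByteArrayOutputStream());
        // cleanupWithLogger(Logger, Closeable...) closes each stream and
        // logs, rather than propagates, any exception raised while closing.
        IOUtils.cleanupWithLogger(LOG, out);
      }
    }
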
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java
index bd74855..2c27b76 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java
@@ -23,14 +23,15 @@ import java.util.List;
 
 import org.junit.Assert;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.IdentityHashStore;
 import org.apache.hadoop.util.IdentityHashStore.Visitor;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestIdentityHashStore {
-  private static final Log LOG = LogFactory.getLog(TestIdentityHashStore.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestIdentityHashStore.class.getName());
 
   private static class Key {
     private final String name;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java
index 671dd37..3751253 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java
@@ -21,15 +21,16 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Testing {@link LightWeightGSet} */
 public class TestLightWeightGSet {
-  public static final Log LOG = LogFactory.getLog(TestLightWeightGSet.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestLightWeightGSet.class);
 
   private static ArrayList<Integer> getRandomList(int length, int randomSeed) {
     Random random = new Random(randomSeed);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
index 3250092..19f213d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
@@ -23,15 +23,16 @@ import java.util.Iterator;
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.*;
 
 /** Testing {@link LightWeightResizableGSet} */
 public class TestLightWeightResizableGSet {
-  public static final Log LOG = LogFactory.getLog(TestLightWeightResizableGSet.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestLightWeightResizableGSet.class);
   private Random random = new Random();
 
   private TestElement[] generateElements(int length) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
index 473c177..58874fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
@@ -20,16 +20,16 @@ package org.apache.hadoop.util;
 import org.junit.Test;
 import static org.junit.Assert.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.crypto.OpensslCipher;
 import org.apache.hadoop.io.compress.Lz4Codec;
 import org.apache.hadoop.io.compress.SnappyCodec;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestNativeCodeLoader {
-  static final Log LOG = LogFactory.getLog(TestNativeCodeLoader.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestNativeCodeLoader.class);
 
   private static boolean requireTestJni() {
     String rtj = System.getProperty("require.test.libhadoop");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
index 73323ea..a9fa4c6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
@@ -19,21 +19,22 @@
 package org.apache.hadoop.util;
 
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestSignalLogger {
-  public static final Log LOG = LogFactory.getLog(TestSignalLogger.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(TestSignalLogger.class);
   
   @Test(timeout=60000)
   public void testInstall() throws Exception {
     Assume.assumeTrue(SystemUtils.IS_OS_UNIX);
-    SignalLogger.INSTANCE.register(LOG);
+    SignalLogger.INSTANCE.register(LogAdapter.create(LOG));
     try {
-      SignalLogger.INSTANCE.register(LOG);
+      SignalLogger.INSTANCE.register(LogAdapter.create(LOG));
       Assert.fail("expected IllegalStateException from double registration");
     } catch (IllegalStateException e) {
       // fall through

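TestSignalLogger is the one spot where the SLF4J Logger cannot simply be passed through: SignalLogger.INSTANCE.register(...) now receives a LogAdapter wrapping the Logger. A hedged sketch of that call pattern, assuming (as the test's own package suggests) that LogAdapter.create(Logger) is package-visible in org.apache.hadoop.util; the SignalLoggerSketch class is illustrative only:

    package org.apache.hadoop.util;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SignalLoggerSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(SignalLoggerSketch.class);

      public static void installOnce() {
        // LogAdapter bridges the SLF4J Logger to SignalLogger.register(),
        // which does not take an org.slf4j.Logger directly; a second
        // register() call throws IllegalStateException, as the test expects.
        SignalLogger.INSTANCE.register(LogAdapter.create(LOG));
      }
    }
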
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index cfa97f4..3e3bf07 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -29,13 +29,13 @@ import java.io.FileWriter;
 import java.io.IOException;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.hamcrest.CoreMatchers.*;
 
@@ -44,7 +44,7 @@ import static org.hamcrest.CoreMatchers.*;
  */
 public class TestWinUtils {
 
-  private static final Log LOG = LogFactory.getLog(TestWinUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestWinUtils.class);
   private static File TEST_DIR = GenericTestUtils.getTestDir(
       TestWinUtils.class.getSimpleName());
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
index 1ef0085..8c13b4f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.mount;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.SimpleTcpServer;
 import org.apache.hadoop.oncrpc.SimpleUdpServer;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.util.ExitUtil.terminate;
 
@@ -37,7 +37,7 @@ import static org.apache.hadoop.util.ExitUtil.terminate;
  * handle for requested directory and returns it to the client.
  */
 abstract public class MountdBase {
-  public static final Log LOG = LogFactory.getLog(MountdBase.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MountdBase.class);
   private final RpcProgram rpcProgram;
   private int udpBoundPort; // Will set after server starts
   private int tcpBoundPort; // Will set after server starts
@@ -63,7 +63,7 @@ abstract public class MountdBase {
     try {
       udpServer.run();
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the UDP server.", e);
+      LOG.error("Failed to start the UDP server.", e);
       if (udpServer.getBoundPort() > 0) {
         rpcProgram.unregister(PortmapMapping.TRANSPORT_UDP,
             udpServer.getBoundPort());
@@ -82,7 +82,7 @@ abstract public class MountdBase {
     try {
       tcpServer.run();
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the TCP server.", e);
+      LOG.error("Failed to start the TCP server.", e);
       if (tcpServer.getBoundPort() > 0) {
         rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP,
             tcpServer.getBoundPort());
@@ -103,7 +103,7 @@ abstract public class MountdBase {
         rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort);
         rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
       } catch (Throwable e) {
-        LOG.fatal("Failed to register the MOUNT service.", e);
+        LOG.error("Failed to register the MOUNT service.", e);
         terminate(1, e);
       }
     }

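The MountdBase hunks also downgrade LOG.fatal(...) to LOG.error(...): org.slf4j.Logger has no fatal level, so the severity is logged as error and the fatal intent is carried by the terminate(...) call that follows. A hedged sketch of the same shape (FatalToErrorSketch, startOrDie() and the plain System.exit are illustrative, not from the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class FatalToErrorSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(FatalToErrorSketch.class);

      public void startOrDie(Runnable server) {
        try {
          server.run();
        } catch (Throwable e) {
          // SLF4J defines trace/debug/info/warn/error but no fatal(), so the
          // old LOG.fatal(...) becomes LOG.error(...) and the fatal outcome
          // is still enforced by exiting the process.
          LOG.error("Failed to start the server.", e);
          System.exit(1);
        }
      }
    }
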
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
index a299ff0..3d5088d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.util.SubnetUtils;
 import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +33,8 @@ import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class provides functionality for loading and checking the mapping 
@@ -64,7 +64,7 @@ public class NfsExports {
     return exports;
   }
   
-  public static final Log LOG = LogFactory.getLog(NfsExports.class);
+  public static final Logger LOG = LoggerFactory.getLogger(NfsExports.class);
   
   // only support IPv4 now
   private static final String IP_ADDRESS = 

