Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2017/07/18 04:33:33 UTC

[6/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common. Contributed by Wenxin He.

HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common. Contributed by Wenxin He.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ccaf0366
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ccaf0366
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ccaf0366

Branch: refs/heads/trunk
Commit: ccaf036662e22da14583942054898c99fa51dae5
Parents: 5b00792
Author: Akira Ajisaka <aa...@apache.org>
Authored: Tue Jul 18 13:32:37 2017 +0900
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Tue Jul 18 13:32:37 2017 +0900

----------------------------------------------------------------------
 .../org/apache/hadoop/conf/Configuration.java   | 17 +++++++++--------
 .../apache/hadoop/conf/ReconfigurableBase.java  |  7 ++++---
 .../hadoop/conf/ReconfigurationServlet.java     |  8 ++++----
 .../hadoop/crypto/JceAesCtrCryptoCodec.java     |  8 ++++----
 .../hadoop/crypto/OpensslAesCtrCryptoCodec.java |  8 ++++----
 .../org/apache/hadoop/crypto/OpensslCipher.java |  8 ++++----
 .../crypto/random/OpensslSecureRandom.java      |  8 ++++----
 .../hadoop/crypto/random/OsSecureRandom.java    |  9 +++++----
 .../apache/hadoop/fs/AbstractFileSystem.java    |  6 +++---
 .../java/org/apache/hadoop/fs/ChecksumFs.java   |  8 ++++----
 .../hadoop/fs/DelegationTokenRenewer.java       | 10 +++++-----
 .../org/apache/hadoop/fs/FSInputChecker.java    |  9 +++++----
 .../java/org/apache/hadoop/fs/FileContext.java  | 10 +++++-----
 .../java/org/apache/hadoop/fs/FileUtil.java     | 10 +++++-----
 .../main/java/org/apache/hadoop/fs/FsShell.java |  6 +++---
 .../apache/hadoop/fs/FsShellPermissions.java    |  4 ++--
 .../main/java/org/apache/hadoop/fs/Globber.java |  7 ++++---
 .../org/apache/hadoop/fs/HarFileSystem.java     | 11 ++++++-----
 .../org/apache/hadoop/fs/LocalDirAllocator.java |  9 +++++----
 .../main/java/org/apache/hadoop/fs/Trash.java   |  7 ++++---
 .../apache/hadoop/fs/TrashPolicyDefault.java    |  8 ++++----
 .../org/apache/hadoop/fs/ftp/FTPFileSystem.java |  8 ++++----
 .../hadoop/fs/permission/FsPermission.java      |  6 +++---
 .../hadoop/fs/sftp/SFTPConnectionPool.java      |  7 ++++---
 .../apache/hadoop/fs/sftp/SFTPFileSystem.java   |  7 ++++---
 .../org/apache/hadoop/fs/shell/Command.java     |  6 +++---
 .../apache/hadoop/ha/ActiveStandbyElector.java  | 15 ++++++++-------
 .../apache/hadoop/ha/FailoverController.java    | 10 +++++-----
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  8 ++++----
 .../org/apache/hadoop/ha/HealthMonitor.java     |  8 ++++----
 .../java/org/apache/hadoop/ha/NodeFencer.java   |  6 +++---
 .../org/apache/hadoop/ha/SshFenceByTcpPort.java | 11 +++--------
 .../apache/hadoop/ha/ZKFailoverController.java  | 20 ++++++++++----------
 ...HAServiceProtocolServerSideTranslatorPB.java |  6 +++---
 .../org/apache/hadoop/http/HttpServer2.java     |  6 +++---
 .../hadoop/http/lib/StaticUserWebFilter.java    |  7 ++++---
 .../java/org/apache/hadoop/io/BloomMapFile.java |  6 +++---
 .../apache/hadoop/io/FastByteComparisons.java   |  7 +++----
 .../main/java/org/apache/hadoop/io/IOUtils.java |  5 ++---
 .../main/java/org/apache/hadoop/io/MapFile.java |  8 ++++----
 .../org/apache/hadoop/io/ReadaheadPool.java     |  6 +++---
 .../java/org/apache/hadoop/io/SequenceFile.java |  7 ++++---
 .../main/java/org/apache/hadoop/io/UTF8.java    |  5 +++--
 .../apache/hadoop/io/compress/CodecPool.java    |  6 +++---
 .../io/compress/CompressionCodecFactory.java    |  8 ++++----
 .../apache/hadoop/io/compress/DefaultCodec.java |  6 +++---
 .../io/compress/bzip2/Bzip2Compressor.java      |  8 ++++----
 .../io/compress/bzip2/Bzip2Decompressor.java    |  8 ++++----
 .../hadoop/io/compress/bzip2/Bzip2Factory.java  |  6 +++---
 .../hadoop/io/compress/lz4/Lz4Compressor.java   |  8 ++++----
 .../hadoop/io/compress/lz4/Lz4Decompressor.java |  8 ++++----
 .../io/compress/snappy/SnappyCompressor.java    |  8 ++++----
 .../io/compress/snappy/SnappyDecompressor.java  |  8 ++++----
 .../io/compress/zlib/BuiltInZlibDeflater.java   |  8 ++++----
 .../hadoop/io/compress/zlib/ZlibCompressor.java |  8 ++++----
 .../hadoop/io/compress/zlib/ZlibFactory.java    |  8 ++++----
 .../apache/hadoop/io/erasurecode/CodecUtil.java |  6 +++---
 .../io/erasurecode/ErasureCodeNative.java       |  8 ++++----
 .../org/apache/hadoop/io/file/tfile/BCFile.java |  6 +++---
 .../hadoop/io/file/tfile/Compression.java       |  6 +++---
 .../org/apache/hadoop/io/file/tfile/TFile.java  |  8 ++++----
 .../hadoop/io/file/tfile/TFileDumper.java       |  8 ++++----
 .../org/apache/hadoop/io/nativeio/NativeIO.java | 16 ++++++++--------
 .../nativeio/SharedFileDescriptorFactory.java   |  7 ++++---
 .../apache/hadoop/io/retry/RetryPolicies.java   |  6 +++---
 .../org/apache/hadoop/io/retry/RetryUtils.java  |  6 +++---
 .../io/serializer/SerializationFactory.java     |  8 ++++----
 .../org/apache/hadoop/ipc/CallQueueManager.java |  7 ++++---
 .../main/java/org/apache/hadoop/ipc/Client.java |  6 +++---
 .../org/apache/hadoop/ipc/FairCallQueue.java    |  6 +++---
 .../apache/hadoop/ipc/ProtobufRpcEngine.java    |  7 ++++---
 .../main/java/org/apache/hadoop/ipc/RPC.java    |  6 +++---
 .../org/apache/hadoop/ipc/RefreshRegistry.java  |  7 ++++---
 .../java/org/apache/hadoop/ipc/RetryCache.java  |  6 +++---
 .../main/java/org/apache/hadoop/ipc/Server.java | 16 ++++++++--------
 .../ipc/WeightedRoundRobinMultiplexer.java      |  8 ++++----
 .../apache/hadoop/ipc/WritableRpcEngine.java    |  6 +++---
 .../hadoop/ipc/metrics/RetryCacheMetrics.java   |  6 +++---
 .../hadoop/ipc/metrics/RpcDetailedMetrics.java  |  8 ++++----
 .../apache/hadoop/ipc/metrics/RpcMetrics.java   |  6 +++---
 .../org/apache/hadoop/jmx/JMXJsonServlet.java   |  7 ++++---
 .../hadoop/metrics2/impl/MBeanInfoBuilder.java  |  2 +-
 .../hadoop/metrics2/impl/MetricsConfig.java     |  8 ++++----
 .../metrics2/impl/MetricsSinkAdapter.java       |  9 +++++----
 .../metrics2/impl/MetricsSourceAdapter.java     |  7 ++++---
 .../hadoop/metrics2/impl/MetricsSystemImpl.java |  6 +++---
 .../hadoop/metrics2/lib/MethodMetric.java       |  7 ++++---
 .../metrics2/lib/MetricsSourceBuilder.java      |  7 ++++---
 .../metrics2/lib/MutableMetricsFactory.java     |  7 ++++---
 .../hadoop/metrics2/lib/MutableRates.java       |  7 +++----
 .../lib/MutableRatesWithAggregation.java        |  7 ++++---
 .../hadoop/metrics2/sink/GraphiteSink.java      |  7 ++++---
 .../sink/ganglia/AbstractGangliaSink.java       | 10 +++++-----
 .../metrics2/sink/ganglia/GangliaSink30.java    |  6 +++---
 .../metrics2/sink/ganglia/GangliaSink31.java    |  7 ++++---
 .../org/apache/hadoop/metrics2/util/MBeans.java |  6 +++---
 .../hadoop/metrics2/util/MetricsCache.java      |  6 +++---
 .../main/java/org/apache/hadoop/net/DNS.java    |  6 +++---
 .../java/org/apache/hadoop/net/NetUtils.java    |  6 +++---
 .../apache/hadoop/net/ScriptBasedMapping.java   |  8 ++++----
 .../apache/hadoop/net/SocketIOWithTimeout.java  |  6 +++---
 .../org/apache/hadoop/net/TableMapping.java     |  6 +++---
 .../apache/hadoop/net/unix/DomainSocket.java    | 10 +++++-----
 .../hadoop/net/unix/DomainSocketWatcher.java    | 12 ++++++------
 .../AuthenticationWithProxyUserFilter.java      |  8 ++++----
 .../hadoop/security/CompositeGroupsMapping.java |  7 ++++---
 .../org/apache/hadoop/security/Credentials.java | 10 +++++-----
 .../java/org/apache/hadoop/security/Groups.java |  7 +++----
 .../HttpCrossOriginFilterInitializer.java       |  8 ++++----
 .../security/JniBasedUnixGroupsMapping.java     |  8 ++++----
 .../JniBasedUnixGroupsMappingWithFallback.java  |  8 ++++----
 .../JniBasedUnixGroupsNetgroupMapping.java      |  6 +++---
 ...edUnixGroupsNetgroupMappingWithFallback.java |  8 ++++----
 .../hadoop/security/LdapGroupsMapping.java      |  7 ++++---
 .../apache/hadoop/security/ProviderUtils.java   |  7 ++++---
 .../apache/hadoop/security/SaslInputStream.java |  7 ++++---
 .../apache/hadoop/security/SaslRpcClient.java   |  7 ++++---
 .../apache/hadoop/security/SaslRpcServer.java   |  6 +++---
 .../apache/hadoop/security/SecurityUtil.java    |  9 ++++-----
 .../hadoop/security/ShellBasedIdMapping.java    |  8 ++++----
 .../ShellBasedUnixGroupsNetgroupMapping.java    |  8 ++++----
 .../hadoop/security/WhitelistBasedResolver.java |  7 ++++---
 .../alias/AbstractJavaKeyStoreProvider.java     |  6 +++---
 .../authorize/ServiceAuthorizationManager.java  |  9 +++++----
 .../hadoop/security/http/CrossOriginFilter.java |  7 ++++---
 .../security/ssl/FileBasedKeyStoresFactory.java |  8 ++++----
 .../security/ssl/ReloadingX509TrustManager.java |  7 ++++---
 .../hadoop/security/token/DtFileOperations.java |  7 ++++---
 .../hadoop/security/token/DtUtilShell.java      |  6 +++---
 .../org/apache/hadoop/security/token/Token.java |  6 +++---
 .../AbstractDelegationTokenSecretManager.java   |  8 ++++----
 .../apache/hadoop/service/AbstractService.java  |  9 +++++----
 .../apache/hadoop/service/CompositeService.java |  7 ++++---
 .../service/LoggingStateChangeListener.java     | 11 ++++++-----
 .../hadoop/service/ServiceOperations.java       |  5 +++--
 .../tracing/TracerConfigurationManager.java     |  8 ++++----
 .../hadoop/util/ApplicationClassLoader.java     | 10 +++++-----
 .../apache/hadoop/util/AsyncDiskService.java    |  7 ++++---
 .../apache/hadoop/util/CombinedIPWhiteList.java |  7 ++++---
 .../org/apache/hadoop/util/FileBasedIPList.java | 11 ++++++-----
 .../main/java/org/apache/hadoop/util/GSet.java  |  6 +++---
 .../hadoop/util/GenericOptionsParser.java       |  7 ++++---
 .../org/apache/hadoop/util/HostsFileReader.java |  9 +++++----
 .../apache/hadoop/util/IntrusiveCollection.java |  7 ++++---
 .../org/apache/hadoop/util/JvmPauseMonitor.java |  6 +++---
 .../org/apache/hadoop/util/MachineList.java     |  6 +++---
 .../apache/hadoop/util/NativeCodeLoader.java    |  8 ++++----
 .../hadoop/util/NodeHealthScriptRunner.java     |  7 ++++---
 .../java/org/apache/hadoop/util/Progress.java   |  6 +++---
 .../apache/hadoop/util/ShutdownHookManager.java |  7 ++++---
 .../hadoop/util/ShutdownThreadsHelper.java      |  7 ++++---
 .../org/apache/hadoop/util/SysInfoLinux.java    |  8 ++++----
 .../org/apache/hadoop/util/SysInfoWindows.java  |  7 ++++---
 .../java/org/apache/hadoop/util/ThreadUtil.java |  7 +++----
 .../org/apache/hadoop/util/VersionInfo.java     |  8 ++++----
 .../hadoop/util/concurrent/AsyncGetFuture.java  |  7 ++++---
 .../hadoop/util/concurrent/ExecutorHelper.java  |  8 ++++----
 .../HadoopScheduledThreadPoolExecutor.java      |  8 ++++----
 .../concurrent/HadoopThreadPoolExecutor.java    |  8 ++++----
 .../hadoop/crypto/CryptoStreamsTestBase.java    |  6 +++---
 .../apache/hadoop/crypto/TestCryptoCodec.java   |  7 ++++---
 .../apache/hadoop/fs/FCStatisticsBaseTest.java  |  7 ++++---
 .../org/apache/hadoop/fs/TestFileContext.java   |  7 ++++---
 .../org/apache/hadoop/fs/TestFileStatus.java    |  8 ++++----
 .../java/org/apache/hadoop/fs/TestFileUtil.java |  6 +++---
 .../org/apache/hadoop/fs/TestFsShellCopy.java   |  6 +++---
 .../apache/hadoop/fs/TestFsShellReturnCode.java |  8 ++++----
 .../org/apache/hadoop/fs/TestFsShellTouch.java  |  6 +++---
 .../org/apache/hadoop/fs/TestHarFileSystem.java |  7 ++++---
 .../fs/contract/AbstractBondedFSContract.java   |  8 ++++----
 .../hadoop/fs/loadGenerator/LoadGenerator.java  | 10 +++++-----
 .../hadoop/ha/ActiveStandbyElectorTestUtil.java |  6 +++---
 .../org/apache/hadoop/ha/DummyHAService.java    |  7 ++++---
 .../org/apache/hadoop/ha/MiniZKFCCluster.java   |  7 ++++---
 .../java/org/apache/hadoop/ha/TestHAAdmin.java  |  6 +++---
 .../org/apache/hadoop/ha/TestHealthMonitor.java |  6 +++---
 .../apache/hadoop/http/TestGlobalFilter.java    |  6 +++---
 .../org/apache/hadoop/http/TestHttpServer.java  |  6 +++---
 .../apache/hadoop/http/TestHttpServerLogs.java  |  6 +++---
 .../hadoop/http/TestHttpServerWebapps.java      |  9 +++++----
 .../hadoop/http/TestHttpServerWithSpengo.java   |  7 ++++---
 .../org/apache/hadoop/http/TestPathFilter.java  |  6 +++---
 .../apache/hadoop/http/TestSSLHttpServer.java   |  7 ++++---
 .../apache/hadoop/http/TestServletFilter.java   |  6 +++---
 .../hadoop/http/resource/JerseyResource.java    |  6 +++---
 .../org/apache/hadoop/io/TestArrayFile.java     |  7 +++++--
 .../hadoop/io/TestDefaultStringifier.java       |  8 +++++---
 .../org/apache/hadoop/io/TestSequenceFile.java  |  7 ++++---
 .../java/org/apache/hadoop/io/TestSetFile.java  |  7 +++----
 .../org/apache/hadoop/io/TestWritableUtils.java |  7 ++++---
 .../apache/hadoop/io/compress/TestCodec.java    |  8 ++++----
 .../io/compress/TestCompressionStreamReuse.java |  9 +++++----
 .../apache/hadoop/io/nativeio/TestNativeIO.java | 10 +++++-----
 .../TestSharedFileDescriptorFactory.java        |  7 ++++---
 .../org/apache/hadoop/ipc/TestAsyncIPC.java     | 10 +++++-----
 .../java/org/apache/hadoop/ipc/TestIPC.java     | 17 ++++++++---------
 .../hadoop/ipc/TestIPCServerResponder.java      | 10 +++++-----
 .../ipc/TestProtoBufRpcServerHandoff.java       | 12 ++++++------
 .../java/org/apache/hadoop/ipc/TestRPC.java     |  6 +++---
 .../apache/hadoop/ipc/TestRPCCompatibility.java |  8 ++++----
 .../hadoop/ipc/TestRPCServerShutdown.java       |  7 ++++---
 .../apache/hadoop/ipc/TestRpcServerHandoff.java |  8 ++++----
 .../java/org/apache/hadoop/ipc/TestSaslRPC.java |  7 +++----
 .../java/org/apache/hadoop/ipc/TestServer.java  |  4 ++--
 .../ipc/TestWeightedRoundRobinMultiplexer.java  |  7 ++++---
 .../metrics2/impl/TestGangliaMetrics.java       |  7 ++++---
 .../hadoop/metrics2/impl/TestMetricsConfig.java |  7 ++++---
 .../metrics2/impl/TestMetricsSystemImpl.java    | 13 +++++++------
 .../hadoop/metrics2/impl/TestSinkQueue.java     | 10 ++++++----
 .../hadoop/metrics2/lib/TestMutableMetrics.java |  7 ++++---
 .../hadoop/metrics2/util/TestMetricsCache.java  |  8 +++++---
 .../org/apache/hadoop/net/ServerSocketUtil.java |  9 +++++----
 .../java/org/apache/hadoop/net/TestDNS.java     |  6 +++---
 .../org/apache/hadoop/net/TestNetUtils.java     |  6 +++---
 .../hadoop/net/TestSocketIOWithTimeout.java     |  8 +++++---
 .../apache/hadoop/net/TestStaticMapping.java    |  7 ++++---
 .../hadoop/net/unix/TestDomainSocket.java       |  2 +-
 .../net/unix/TestDomainSocketWatcher.java       | 15 ++++++++-------
 .../security/TestCompositeGroupMapping.java     |  7 ++++---
 .../hadoop/security/TestDoAsEffectiveUser.java  |  8 ++++----
 .../hadoop/security/TestGroupFallback.java      |  7 ++++---
 .../hadoop/security/TestGroupsCaching.java      |  8 ++++----
 .../TestShellBasedUnixGroupsMapping.java        |  8 ++++----
 .../alias/TestCredentialProviderFactory.java    |  7 ++++---
 .../authorize/TestAccessControlList.java        |  8 ++++----
 .../security/authorize/TestProxyUsers.java      |  8 ++++----
 .../token/delegation/TestDelegationToken.java   |  7 ++++---
 .../hadoop/service/TestCompositeService.java    |  7 ++++---
 .../hadoop/service/TestServiceLifecycle.java    |  7 ++++---
 .../org/apache/hadoop/test/MetricsAsserts.java  |  6 +++---
 .../hadoop/test/MultithreadedTestUtil.java      |  8 ++++----
 .../hadoop/test/TestGenericTestUtils.java       |  5 +----
 .../org/apache/hadoop/test/TestJUnitSetup.java  |  7 ++++---
 .../hadoop/util/TestAsyncDiskService.java       |  7 ++++---
 .../org/apache/hadoop/util/TestClasspath.java   |  9 +++++----
 .../org/apache/hadoop/util/TestFindClass.java   |  7 ++++---
 .../hadoop/util/TestIdentityHashStore.java      |  7 ++++---
 .../apache/hadoop/util/TestLightWeightGSet.java |  7 ++++---
 .../util/TestLightWeightResizableGSet.java      |  7 ++++---
 .../hadoop/util/TestNativeCodeLoader.java       |  6 +++---
 .../apache/hadoop/util/TestSignalLogger.java    | 11 ++++++-----
 .../org/apache/hadoop/util/TestWinUtils.java    |  6 +++---
 .../org/apache/hadoop/mount/MountdBase.java     | 12 ++++++------
 .../java/org/apache/hadoop/nfs/NfsExports.java  |  6 +++---
 .../org/apache/hadoop/nfs/nfs3/FileHandle.java  |  6 +++---
 .../org/apache/hadoop/nfs/nfs3/Nfs3Base.java    | 10 +++++-----
 .../hadoop/oncrpc/RegistrationClient.java       |  7 ++++---
 .../java/org/apache/hadoop/oncrpc/RpcCall.java  | 10 +++++-----
 .../org/apache/hadoop/oncrpc/RpcProgram.java    |  6 +++---
 .../java/org/apache/hadoop/oncrpc/RpcUtil.java  | 11 ++++++-----
 .../hadoop/oncrpc/SimpleTcpClientHandler.java   |  7 ++++---
 .../apache/hadoop/oncrpc/SimpleTcpServer.java   |  7 ++++---
 .../apache/hadoop/oncrpc/SimpleUdpServer.java   |  7 ++++---
 .../hadoop/oncrpc/security/Credentials.java     |  6 +++---
 .../hadoop/oncrpc/security/SecurityHandler.java |  7 ++++---
 .../java/org/apache/hadoop/portmap/Portmap.java |  8 ++++----
 .../hadoop/portmap/RpcProgramPortmap.java       |  7 ++++---
 257 files changed, 1021 insertions(+), 932 deletions(-)
----------------------------------------------------------------------
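
The change applied across these 257 files follows one mechanical pattern. As a rough
sketch only (the MyService class below is hypothetical and not part of this commit),
the before/after shape is:

    // Before: commons-logging
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(MyService.class);

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MyService {
      private static final Logger LOG =
          LoggerFactory.getLogger(MyService.class);

      void doWork(int items) {
        // slf4j also allows parameterized messages, which skip the string
        // concatenation cost when the level is disabled.
        LOG.debug("processing {} items", items);
      }
    }

Beyond the import and field swap, the patch also adjusts the handful of call sites that
depended on commons-logging specifics -- LOG.fatal(...) (slf4j has no fatal level),
Log-typed helpers such as IOUtils.cleanup(...), and calls that passed a bare exception
object to a log method -- as the hunks below show.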


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 550aee7..de52fbb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -79,8 +79,6 @@ import javax.xml.transform.stream.StreamResult;
 
 import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -98,6 +96,8 @@ import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.stax2.XMLInputFactory2;
 import org.codehaus.stax2.XMLStreamReader2;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
@@ -192,11 +192,12 @@ import com.google.common.base.Strings;
 @InterfaceStability.Stable
 public class Configuration implements Iterable<Map.Entry<String,String>>,
                                       Writable {
-  private static final Log LOG =
-    LogFactory.getLog(Configuration.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Configuration.class);
 
-  private static final Log LOG_DEPRECATION =
-    LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation");
+  private static final Logger LOG_DEPRECATION =
+      LoggerFactory.getLogger(
+          "org.apache.hadoop.conf.Configuration.deprecation");
 
   private boolean quietmode = true;
 
@@ -2885,10 +2886,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       }
       return null;
     } catch (IOException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (XMLStreamException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     }
   }
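
A note on the hunk above: org.slf4j.Logger exposes trace/debug/info/warn/error but no
fatal(), so the former LOG.fatal(...) calls become LOG.error(...); the RuntimeException
that follows keeps the failure fatal to the caller. A minimal sketch of the same shape
(hypothetical ConfParseExample class, not code from this commit, using the optional
parameterized form rather than the string concatenation kept in the patch):

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ConfParseExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(ConfParseExample.class);

      void parse(String name) {
        try {
          load(name);
        } catch (IOException e) {
          // A trailing Throwable argument is attached as the exception,
          // so the stack trace is still logged.
          LOG.error("error parsing conf {}", name, e);
          throw new RuntimeException(e);
        }
      }

      private void load(String name) throws IOException {
        // stand-in for the real XML parsing
      }
    }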

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index bdd006d..146c6d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -22,9 +22,10 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -41,8 +42,8 @@ import java.util.Map;
 public abstract class ReconfigurableBase 
   extends Configured implements Reconfigurable {
   
-  private static final Log LOG =
-    LogFactory.getLog(ReconfigurableBase.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurableBase.class);
   // Use for testing purpose.
   private ReconfigurationUtil reconfigurationUtil = new ReconfigurationUtil();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index bb221ee..5a616f7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.conf;
 
-import org.apache.commons.logging.*;
-
 import org.apache.commons.lang.StringEscapeUtils;
 
 import java.util.Collection;
@@ -33,6 +31,8 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A servlet for changing a node's configuration.
@@ -45,8 +45,8 @@ public class ReconfigurationServlet extends HttpServlet {
   
   private static final long serialVersionUID = 1L;
 
-  private static final Log LOG =
-    LogFactory.getLog(ReconfigurationServlet.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurationServlet.class);
 
   // the prefix used to fing the attribute holding the reconfigurable 
   // for a given request

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
index 61ee743..de0e5dd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
@@ -26,12 +26,12 @@ import javax.crypto.Cipher;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;
@@ -42,8 +42,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(JceAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JceAesCtrCryptoCodec.class.getName());
   
   private Configuration conf;
   private String provider;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
index d08e588..8d01f42 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
@@ -26,22 +26,22 @@ import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.crypto.random.OsSecureRandom;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implement the AES-CTR crypto codec using JNI into OpenSSL.
  */
 @InterfaceAudience.Private
 public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslAesCtrCryptoCodec.class.getName());
 
   private Configuration conf;
   private Random random;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 6a03bb6..133a9f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -26,13 +26,13 @@ import javax.crypto.IllegalBlockSizeException;
 import javax.crypto.NoSuchPaddingException;
 import javax.crypto.ShortBufferException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OpenSSL cipher using JNI.
@@ -41,8 +41,8 @@ import org.apache.hadoop.util.PerformanceAdvisory;
  */
 @InterfaceAudience.Private
 public final class OpensslCipher {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslCipher.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslCipher.class.getName());
   public static final int ENCRYPT_MODE = 1;
   public static final int DECRYPT_MODE = 0;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
index 6c53a0a..1219bf9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.crypto.random;
 
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OpenSSL secure random using JNI.
@@ -44,8 +44,8 @@ import org.apache.hadoop.util.PerformanceAdvisory;
 @InterfaceAudience.Private
 public class OpensslSecureRandom extends Random {
   private static final long serialVersionUID = -7828193502768789584L;
-  private static final Log LOG =
-      LogFactory.getLog(OpensslSecureRandom.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslSecureRandom.class.getName());
   
   /** If native SecureRandom unavailable, use java SecureRandom */
   private java.security.SecureRandom fallback = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
index 9428b98..6671591 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT;
@@ -39,7 +39,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 public class OsSecureRandom extends Random implements Closeable, Configurable {
-  public static final Log LOG = LogFactory.getLog(OsSecureRandom.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(OsSecureRandom.class);
   
   private static final long serialVersionUID = 6391500337172057900L;
 
@@ -112,7 +113,7 @@ public class OsSecureRandom extends Random implements Closeable, Configurable {
   @Override
   synchronized public void close() {
     if (stream != null) {
-      IOUtils.cleanup(LOG, stream);
+      IOUtils.cleanupWithLogger(LOG, stream);
       stream = null;
     }
   }
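
The IOUtils change above is another recurring adjustment: IOUtils.cleanup(...) takes a
commons-logging Log as its first parameter, so call sites whose LOG field is now an
org.slf4j.Logger move to the Logger-typed cleanupWithLogger(...) overload used in the
hunk. A minimal sketch (hypothetical CleanupExample class, not code from this commit,
assuming a hadoop-common version that provides cleanupWithLogger):

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CleanupExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(CleanupExample.class);

      void closeQuietly(Closeable stream) {
        // Closes the stream, logging (rather than throwing) any IOException.
        IOUtils.cleanupWithLogger(LOG, stream);
      }
    }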

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index ef68437..9bea8f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -32,8 +32,6 @@ import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -52,6 +50,8 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class provides an interface for implementors of a Hadoop file system
@@ -66,7 +66,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class AbstractFileSystem {
-  static final Log LOG = LogFactory.getLog(AbstractFileSystem.class);
+  static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class);
 
   /** Recording statistics per a file system class. */
   private static final Map<URI, Statistics> 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 0a8cc73..75622ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -27,14 +27,14 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Abstract Checksumed Fs.
@@ -110,8 +110,8 @@ public abstract class ChecksumFs extends FilterFs {
    * It verifies that data matches checksums.
    *******************************************************/
   private static class ChecksumFSInputChecker extends FSInputChecker {
-    public static final Log LOG 
-      = LogFactory.getLog(FSInputChecker.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(FSInputChecker.class);
     private static final int HEADER_LENGTH = 8;
     
     private ChecksumFs fs;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 3542a9b..09c3a8a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -26,12 +26,12 @@ import java.util.concurrent.DelayQueue;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A daemon thread that waits for the next file system to renew.
@@ -39,8 +39,8 @@ import org.apache.hadoop.util.Time;
 @InterfaceAudience.Private
 public class DelegationTokenRenewer
     extends Thread {
-  private static final Log LOG = LogFactory
-      .getLog(DelegationTokenRenewer.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DelegationTokenRenewer.class);
 
   /** The renewable interface used by the renewer. */
   public interface Renewable {
@@ -243,7 +243,7 @@ public class DelegationTokenRenewer
         LOG.error("Interrupted while canceling token for " + fs.getUri()
             + "filesystem");
         if (LOG.isDebugEnabled()) {
-          LOG.debug(ie.getStackTrace());
+          LOG.debug("Exception in removeRenewAction: ", ie);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index 9b66c95..4f06e26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -22,11 +22,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.zip.Checksum;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.nio.ByteBuffer;
 import java.nio.IntBuffer;
 
@@ -37,8 +38,8 @@ import java.nio.IntBuffer;
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 abstract public class FSInputChecker extends FSInputStream {
-  public static final Log LOG 
-  = LogFactory.getLog(FSInputChecker.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(FSInputChecker.class);
   
   /** The file name from which data is read from */
   protected Path file;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 160a63d..fef968b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -35,8 +35,6 @@ import java.util.Stack;
 import java.util.TreeSet;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -63,6 +61,8 @@ import org.apache.hadoop.util.ShutdownHookManager;
 
 import com.google.common.base.Preconditions;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The FileContext class provides an interface for users of the Hadoop
@@ -169,7 +169,7 @@ import org.apache.htrace.core.Tracer;
 @InterfaceStability.Stable
 public class FileContext {
   
-  public static final Log LOG = LogFactory.getLog(FileContext.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FileContext.class);
   /**
    * Default permission for directory and symlink
    * In previous versions, this default permission was also used to
@@ -332,7 +332,7 @@ public class FileContext {
         }
       });
     } catch (InterruptedException ex) {
-      LOG.error(ex);
+      LOG.error(ex.toString());
       throw new IOException("Failed to get the AbstractFileSystem for path: "
           + uri, ex);
     }
@@ -446,7 +446,7 @@ public class FileContext {
     } catch (UnsupportedFileSystemException ex) {
       throw ex;
     } catch (IOException ex) {
-      LOG.error(ex);
+      LOG.error(ex.toString());
       throw new RuntimeException(ex);
     }
     return getFileContext(defaultAfs, aConf);
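
The two FileContext hunks above show the remaining pattern: commons-logging accepts
error(Object), so LOG.error(ex) compiled there, but org.slf4j.Logger expects a String
message, hence the ex.toString() conversion. Passing the exception as the final
argument instead would also preserve the stack trace; a minimal sketch of that
alternative (hypothetical AfsLookupExample class, not what this patch does):

    import java.net.URI;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class AfsLookupExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(AfsLookupExample.class);

      void report(URI uri, Exception ex) {
        // The trailing Throwable is logged together with its stack trace.
        LOG.error("Failed to get the AbstractFileSystem for path: {}", uri, ex);
      }
    }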

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index b656a87..eb8a5c3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -45,8 +45,6 @@ import java.util.zip.ZipFile;
 import org.apache.commons.collections.map.CaseInsensitiveMap;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -57,6 +55,8 @@ import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A collection of file-processing util methods
@@ -65,7 +65,7 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceStability.Evolving
 public class FileUtil {
 
-  private static final Log LOG = LogFactory.getLog(FileUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
 
   /* The error code is defined in winutils to indicate insufficient
    * privilege to create symbolic links. This value need to keep in
@@ -697,7 +697,7 @@ public class FileUtil {
         entry = tis.getNextTarEntry();
       }
     } finally {
-      IOUtils.cleanup(LOG, tis, inputStream);
+      IOUtils.cleanupWithLogger(LOG, tis, inputStream);
     }
   }
 
@@ -1287,7 +1287,7 @@ public class FileUtil {
       bos = new BufferedOutputStream(fos);
       jos = new JarOutputStream(bos, jarManifest);
     } finally {
-      IOUtils.cleanup(LOG, jos, bos, fos);
+      IOUtils.cleanupWithLogger(LOG, jos, bos, fos);
     }
     String[] jarCp = {classPathJar.getCanonicalPath(),
                         unexpandedWildcardClasspath.toString()};

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 59d15c2..721f4df 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
 import java.util.LinkedList;
 
 import org.apache.commons.lang.WordUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -39,12 +37,14 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Provide command line access to a FileSystem. */
 @InterfaceAudience.Private
 public class FsShell extends Configured implements Tool {
   
-  static final Log LOG = LogFactory.getLog(FsShell.class);
+  static final Logger LOG = LoggerFactory.getLogger(FsShell.class);
 
   private static final int MAX_LINE_WIDTH = 80;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
index 0a82929..76e379c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
@@ -22,7 +22,6 @@ import java.util.LinkedList;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.ChmodParser;
@@ -32,6 +31,7 @@ import org.apache.hadoop.fs.shell.CommandFormat;
 import org.apache.hadoop.fs.shell.FsCommand;
 import org.apache.hadoop.fs.shell.PathData;
 import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
 
 /**
  * This class is the home for file permissions related commands.
@@ -41,7 +41,7 @@ import org.apache.hadoop.util.Shell;
 @InterfaceStability.Unstable
 public class FsShellPermissions extends FsCommand {
 
-  static Log LOG = FsShell.LOG;
+  static final Logger LOG = FsShell.LOG;
   
   /**
    * Register the permission related commands with the factory

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
index 7c69167..ca3db1d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
@@ -23,18 +23,19 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 class Globber {
-  public static final Log LOG = LogFactory.getLog(Globber.class.getName());
+  public static final Logger LOG =
+      LoggerFactory.getLogger(Globber.class.getName());
 
   private final FileSystem fs;
   private final FileContext fc;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 6a1e8bd..4c2fd1b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.EOFException;
 import java.io.FileNotFoundException;
@@ -50,7 +50,8 @@ import java.util.*;
 
 public class HarFileSystem extends FileSystem {
 
-  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HarFileSystem.class);
 
   public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
   public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
@@ -1173,7 +1174,7 @@ public class HarFileSystem extends FileSystem {
         LOG.warn("Encountered exception ", ioe);
         throw ioe;
       } finally {
-        IOUtils.cleanup(LOG, lin, in);
+        IOUtils.cleanupWithLogger(LOG, lin, in);
       }
 
       FSDataInputStream aIn = fs.open(archiveIndexPath);
@@ -1198,7 +1199,7 @@ public class HarFileSystem extends FileSystem {
           }
         }
       } finally {
-        IOUtils.cleanup(LOG, aIn);
+        IOUtils.cleanupWithLogger(LOG, aIn);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 1ed01ea..c1e9d21 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -23,14 +23,15 @@ import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.*;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration; 
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** An implementation of a round-robin scheme for disk allocation for creating
  * files. The way it works is that it is kept track what disk was last
@@ -245,8 +246,8 @@ public class LocalDirAllocator {
   
   private static class AllocatorPerContext {
 
-    private final Log LOG =
-      LogFactory.getLog(AllocatorPerContext.class);
+    private static final Logger LOG =
+        LoggerFactory.getLogger(AllocatorPerContext.class);
 
     private Random dirIndexRandomizer = new Random();
     private String contextCfgItemName;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index b771812..49cd600 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -19,11 +19,12 @@ package org.apache.hadoop.fs;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** 
  * Provides a trash facility which supports pluggable Trash policies. 
@@ -34,8 +35,8 @@ import org.apache.hadoop.conf.Configured;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Trash extends Configured {
-  private static final org.apache.commons.logging.Log LOG =
-      LogFactory.getLog(Trash.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Trash.class);
 
   private TrashPolicy trashPolicy; // configured trash policy instance
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
index c65e16a..265e967 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
@@ -30,8 +30,6 @@ import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -41,6 +39,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Provides a <i>trash</i> feature.  Files are moved to a user's trash
  * directory, a subdirectory of their home directory named ".Trash".  Files are
@@ -54,8 +54,8 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class TrashPolicyDefault extends TrashPolicy {
-  private static final Log LOG =
-    LogFactory.getLog(TrashPolicyDefault.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TrashPolicyDefault.class);
 
   private static final Path CURRENT = new Path("Current");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
index 5f4c8552..4c1236b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
@@ -25,8 +25,6 @@ import java.net.URI;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.ftp.FTP;
 import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPFile;
@@ -45,6 +43,8 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -56,8 +56,8 @@ import org.apache.hadoop.util.Progressable;
 @InterfaceStability.Stable
 public class FTPFileSystem extends FileSystem {
 
-  public static final Log LOG = LogFactory
-      .getLog(FTPFileSystem.class);
+  public static final Logger LOG = LoggerFactory
+      .getLogger(FTPFileSystem.class);
 
   public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index ddb2724..73ab5f6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -24,8 +24,6 @@ import java.io.InvalidObjectException;
 import java.io.ObjectInputValidation;
 import java.io.Serializable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -33,6 +31,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A class for file/directory permissions.
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.WritableFactory;
 @InterfaceStability.Stable
 public class FsPermission implements Writable, Serializable,
     ObjectInputValidation {
-  private static final Log LOG = LogFactory.getLog(FsPermission.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FsPermission.class);
   private static final long serialVersionUID = 0x2fe08564;
 
   static final WritableFactory FACTORY = new WritableFactory() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
index c7fae7b..de86bab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
@@ -23,19 +23,20 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
 
 import com.jcraft.jsch.ChannelSftp;
 import com.jcraft.jsch.JSch;
 import com.jcraft.jsch.JSchException;
 import com.jcraft.jsch.Session;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Concurrent/Multiple Connections. */
 class SFTPConnectionPool {
 
-  public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SFTPFileSystem.class);
   // Maximum number of allowed live connections. This doesn't mean we cannot
   // have more live connections. It means that when we have more
   // live connections than this threshold, any unused connection will be

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
index 6de69fa..421769d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java
@@ -26,8 +26,6 @@ import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.Vector;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -41,11 +39,14 @@ import com.jcraft.jsch.ChannelSftp;
 import com.jcraft.jsch.ChannelSftp.LsEntry;
 import com.jcraft.jsch.SftpATTRS;
 import com.jcraft.jsch.SftpException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** SFTP FileSystem. */
 public class SFTPFileSystem extends FileSystem {
 
-  public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SFTPFileSystem.class);
 
   private SFTPConnectionPool connectionPool;
   private URI uri;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index 4c5cbad..c292cf6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -27,8 +27,6 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +34,8 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An abstract class for the execution of a file system command
@@ -59,7 +59,7 @@ abstract public class Command extends Configured {
   private int depth = 0;
   protected ArrayList<Exception> exceptions = new ArrayList<Exception>();
 
-  private static final Log LOG = LogFactory.getLog(Command.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Command.class);
 
   /** allows stdout to be captured if necessary */
   public PrintStream out = System.out;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index cf95a49..93fd2cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -26,8 +26,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -47,6 +45,8 @@ import org.apache.zookeeper.KeeperException.Code;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * 
@@ -141,7 +141,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   @VisibleForTesting
   protected static final String BREADCRUMB_FILENAME = "ActiveBreadCrumb";
 
-  public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(ActiveStandbyElector.class);
 
   private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000;
 
@@ -712,7 +713,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   }
 
   private void fatalError(String errorMessage) {
-    LOG.fatal(errorMessage);
+    LOG.error(errorMessage);
     reset();
     appClient.notifyFatalError(errorMessage);
   }
@@ -824,10 +825,10 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
         createConnection();
         success = true;
       } catch(IOException e) {
-        LOG.warn(e);
+        LOG.warn(e.toString());
         sleepFor(5000);
       } catch(KeeperException e) {
-        LOG.warn(e);
+        LOG.warn(e.toString());
         sleepFor(5000);
       }
       ++connectionRetryCount;
@@ -866,7 +867,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
     try {
       tempZk.close();
     } catch(InterruptedException e) {
-      LOG.warn(e);
+      LOG.warn(e.toString());
     }
     zkConnectionState = ConnectionState.TERMINATED;
     wantToBeInElection = false;

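The three LOG.warn(e) call sites above change because org.slf4j.Logger.warn takes a String where the commons-logging Log.warn took an Object, so the exception can no longer be passed on its own; e.toString() reproduces the old output. Passing the Throwable as a trailing argument would additionally record the stack trace. A hedged sketch of both forms, using an illustrative helper that is not part of the patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative retry helper; only the two warn() idioms matter here.
    class ZkRetryExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(ZkRetryExample.class);

      void connectWithRetry(Runnable createConnection) {
        try {
          createConnection.run();
        } catch (RuntimeException e) {
          // What the patch does: message text only, same output as LOG.warn(e).
          LOG.warn(e.toString());
          // Alternative (not used in the patch): message plus stack trace.
          LOG.warn("Failed to create ZooKeeper connection, will retry", e);
        }
      }
    }
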
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
index d952e29..3c05a25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
@@ -19,9 +19,6 @@ package org.apache.hadoop.ha;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -32,6 +29,8 @@ import org.apache.hadoop.ha.HAServiceProtocol.RequestSource;
 import org.apache.hadoop.ipc.RPC;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The FailOverController is responsible for electing an active service
@@ -43,7 +42,8 @@ import com.google.common.base.Preconditions;
 @InterfaceStability.Evolving
 public class FailoverController {
 
-  private static final Log LOG = LogFactory.getLog(FailoverController.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FailoverController.class);
 
   private final int gracefulFenceTimeout;
   private final int rpcTimeoutToNewActive;
@@ -252,7 +252,7 @@ public class FailoverController {
         } catch (FailoverFailedException ffe) {
           msg += ". Failback to " + fromSvc +
             " failed (" + ffe.getMessage() + ")";
-          LOG.fatal(msg);
+          LOG.error(msg);
         }
       }
       throw new FailoverFailedException(msg, cause);

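LOG.fatal becomes LOG.error here and in the files below because slf4j defines no FATAL level. If a deployment still needs to single out these messages, slf4j markers are one option a logging backend can filter on; this is a sketch of that idea under the assumption that such filtering is wanted, not something the commit does:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    // Illustrative only: tagging former fatal() call sites with a FATAL marker.
    class FatalMarkerExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(FatalMarkerExample.class);
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      void failbackFailed(String msg, Throwable cause) {
        // Logged at ERROR, but a backend can match on the FATAL marker.
        LOG.error(FATAL, msg, cause);
      }
    }
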
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index 5eff14c..9b7d7ba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -28,8 +28,6 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -43,6 +41,8 @@ import org.apache.hadoop.util.ToolRunner;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A command-line tool for making calls in the HAServiceProtocol.
@@ -62,7 +62,7 @@ public abstract class HAAdmin extends Configured implements Tool {
    * operation, which is why it is not documented in the usage below.
    */
   private static final String FORCEMANUAL = "forcemanual";
-  private static final Log LOG = LogFactory.getLog(HAAdmin.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HAAdmin.class);
 
   private int rpcTimeoutForChecks = -1;
   
@@ -449,7 +449,7 @@ public abstract class HAAdmin extends Configured implements Tool {
     
     if (cmdLine.hasOption(FORCEMANUAL)) {
       if (!confirmForceManual()) {
-        LOG.fatal("Aborted");
+        LOG.error("Aborted");
         return -1;
       }
       // Instruct the NNs to honor this request even if they're

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
index 24c149c..a93df75 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
@@ -23,8 +23,6 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.ha.HAServiceProtocol;
@@ -35,6 +33,8 @@ import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.util.Daemon;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is a daemon which runs in a loop, periodically heartbeating
@@ -47,7 +47,7 @@ import com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Private
 public class HealthMonitor {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       HealthMonitor.class);
 
   private Daemon daemon;
@@ -283,7 +283,7 @@ public class HealthMonitor {
       setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
         @Override
         public void uncaughtException(Thread t, Throwable e) {
-          LOG.fatal("Health monitor failed", e);
+          LOG.error("Health monitor failed", e);
           enterState(HealthMonitor.State.HEALTH_MONITOR_FAILED);
         }
       });

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
index 1afd937..2247a34 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
@@ -22,8 +22,6 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -31,6 +29,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class parses the configured list of fencing methods, and
@@ -61,7 +61,7 @@ public class NodeFencer {
   private static final Pattern HASH_COMMENT_RE =
     Pattern.compile("#.*$");
 
-  private static final Log LOG = LogFactory.getLog(NodeFencer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(NodeFencer.class);
 
   /**
    * Standard fencing methods included with Hadoop.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
index 64cd5a8..9ae113b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
@@ -23,8 +23,6 @@ import java.util.Collection;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -272,7 +270,7 @@ public class SshFenceByTcpPort extends Configured
    * Adapter from JSch's logger interface to our log4j
    */
   private static class LogAdapter implements com.jcraft.jsch.Logger {
-    static final Log LOG = LogFactory.getLog(
+    static final Logger LOG = LoggerFactory.getLogger(
         SshFenceByTcpPort.class.getName() + ".jsch");
 
     @Override
@@ -285,9 +283,8 @@ public class SshFenceByTcpPort extends Configured
       case com.jcraft.jsch.Logger.WARN:
         return LOG.isWarnEnabled();
       case com.jcraft.jsch.Logger.ERROR:
-        return LOG.isErrorEnabled();
       case com.jcraft.jsch.Logger.FATAL:
-        return LOG.isFatalEnabled();
+        return LOG.isErrorEnabled();
       default:
         return false;
       }
@@ -306,10 +303,8 @@ public class SshFenceByTcpPort extends Configured
         LOG.warn(message);
         break;
       case com.jcraft.jsch.Logger.ERROR:
-        LOG.error(message);
-        break;
       case com.jcraft.jsch.Logger.FATAL:
-        LOG.fatal(message);
+        LOG.error(message);
         break;
       default:
         break;

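In the LogAdapter hunks above, the ERROR case drops its own return/break so that it falls through to FATAL, and both levels now map to slf4j's ERROR. To make the fall-through explicit, here is a sketch of the resulting mapping as a standalone adapter; the DEBUG and INFO branches are assumed, since they are not shown in this diff:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative re-statement of the post-patch level mapping; the real
    // adapter is a private class inside SshFenceByTcpPort.
    class JschLevelMappingExample implements com.jcraft.jsch.Logger {
      static final Logger LOG =
          LoggerFactory.getLogger(JschLevelMappingExample.class);

      @Override
      public boolean isEnabled(int level) {
        switch (level) {
        case com.jcraft.jsch.Logger.DEBUG:
          return LOG.isDebugEnabled();      // assumed, not shown in the diff
        case com.jcraft.jsch.Logger.INFO:
          return LOG.isInfoEnabled();       // assumed, not shown in the diff
        case com.jcraft.jsch.Logger.WARN:
          return LOG.isWarnEnabled();
        case com.jcraft.jsch.Logger.ERROR:  // falls through
        case com.jcraft.jsch.Logger.FATAL:
          return LOG.isErrorEnabled();      // slf4j has no FATAL level
        default:
          return false;
        }
      }

      @Override
      public void log(int level, String message) {
        switch (level) {
        case com.jcraft.jsch.Logger.DEBUG:
          LOG.debug(message);               // assumed, not shown in the diff
          break;
        case com.jcraft.jsch.Logger.INFO:
          LOG.info(message);                // assumed, not shown in the diff
          break;
        case com.jcraft.jsch.Logger.WARN:
          LOG.warn(message);
          break;
        case com.jcraft.jsch.Logger.ERROR:  // falls through
        case com.jcraft.jsch.Logger.FATAL:
          LOG.error(message);
          break;
        default:
          break;
        }
      }
    }
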
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index 055bcaa..20a4681 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -28,8 +28,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -56,11 +54,13 @@ import org.apache.zookeeper.data.ACL;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.LimitedPrivate("HDFS")
 public abstract class ZKFailoverController {
 
-  static final Log LOG = LogFactory.getLog(ZKFailoverController.class);
+  static final Logger LOG = LoggerFactory.getLogger(ZKFailoverController.class);
   
   public static final String ZK_QUORUM_KEY = "ha.zookeeper.quorum";
   private static final String ZK_SESSION_TIMEOUT_KEY = "ha.zookeeper.session-timeout.ms";
@@ -162,7 +162,7 @@ public abstract class ZKFailoverController {
 
   public int run(final String[] args) throws Exception {
     if (!localTarget.isAutoFailoverEnabled()) {
-      LOG.fatal("Automatic failover is not enabled for " + localTarget + "." +
+      LOG.error("Automatic failover is not enabled for " + localTarget + "." +
           " Please ensure that automatic failover is enabled in the " +
           "configuration before running the ZK failover controller.");
       return ERR_CODE_AUTO_FAILOVER_NOT_ENABLED;
@@ -184,7 +184,7 @@ public abstract class ZKFailoverController {
         }
       });
     } catch (RuntimeException rte) {
-      LOG.fatal("The failover controller encounters runtime error: " + rte);
+      LOG.error("The failover controller encounters runtime error: " + rte);
       throw (Exception)rte.getCause();
     }
   }
@@ -195,7 +195,7 @@ public abstract class ZKFailoverController {
     try {
       initZK();
     } catch (KeeperException ke) {
-      LOG.fatal("Unable to start failover controller. Unable to connect "
+      LOG.error("Unable to start failover controller. Unable to connect "
           + "to ZooKeeper quorum at " + zkQuorum + ". Please check the "
           + "configured value for " + ZK_QUORUM_KEY + " and ensure that "
           + "ZooKeeper is running.");
@@ -221,7 +221,7 @@ public abstract class ZKFailoverController {
     }
 
     if (!elector.parentZNodeExists()) {
-      LOG.fatal("Unable to start failover controller. "
+      LOG.error("Unable to start failover controller. "
           + "Parent znode does not exist.\n"
           + "Run with -formatZK flag to initialize ZooKeeper.");
       return ERR_CODE_NO_PARENT_ZNODE;
@@ -230,7 +230,7 @@ public abstract class ZKFailoverController {
     try {
       localTarget.checkFencingConfigured();
     } catch (BadFencingConfigurationException e) {
-      LOG.fatal("Fencing is not configured for " + localTarget + ".\n" +
+      LOG.error("Fencing is not configured for " + localTarget + ".\n" +
           "You must configure a fencing method before using automatic " +
           "failover.", e);
       return ERR_CODE_NO_FENCER;
@@ -376,7 +376,7 @@ public abstract class ZKFailoverController {
   }
   
   private synchronized void fatalError(String err) {
-    LOG.fatal("Fatal error occurred:" + err);
+    LOG.error("Fatal error occurred:" + err);
     fatalError = err;
     notifyAll();
   }
@@ -395,7 +395,7 @@ public abstract class ZKFailoverController {
 
     } catch (Throwable t) {
       String msg = "Couldn't make " + localTarget + " active";
-      LOG.fatal(msg, t);
+      LOG.error(msg, t);
       
       recordActiveAttempt(new ActiveAttemptRecord(false, msg + "\n" +
           StringUtils.stringifyException(t)));

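Two notes on the hunks above, neither of which the commit changes: messages such as "Automatic failover is not enabled for " + localTarget are still built by concatenation, where slf4j's {} placeholders would defer that work until the level is enabled; and LOG.error("... runtime error: " + rte) records only the exception's toString(), whereas passing it as the last argument would keep the stack trace. A hedged sketch of both idioms with illustrative names:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative only; localTarget and rte stand in for the values used above.
    class ZkfcLoggingIdioms {
      private static final Logger LOG =
          LoggerFactory.getLogger(ZkfcLoggingIdioms.class);

      void report(Object localTarget, RuntimeException rte) {
        // Placeholder form: no string concatenation unless ERROR is enabled.
        LOG.error("Automatic failover is not enabled for {}.", localTarget);

        // Throwable as the last argument: message plus the full stack trace.
        LOG.error("The failover controller encountered a runtime error", rte);
      }
    }
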
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
index 63bfbca..7f75582 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.ha.protocolPB;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.ha.HAServiceProtocol;
@@ -42,6 +40,8 @@ import org.apache.hadoop.ipc.RPC;
 
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is used on the server side. Calls come across the wire for the
@@ -61,7 +61,7 @@ public class HAServiceProtocolServerSideTranslatorPB implements
       TransitionToActiveResponseProto.newBuilder().build();
   private static final TransitionToStandbyResponseProto TRANSITION_TO_STANDBY_RESP = 
       TransitionToStandbyResponseProto.newBuilder().build();
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       HAServiceProtocolServerSideTranslatorPB.class);
   
   public HAServiceProtocolServerSideTranslatorPB(HAServiceProtocol server) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index d7436b2..28b9bb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -53,8 +53,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.sun.jersey.spi.container.servlet.ServletContainer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -103,6 +101,8 @@ import org.eclipse.jetty.util.MultiException;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal is
@@ -117,7 +117,7 @@ import org.eclipse.jetty.webapp.WebAppContext;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public final class HttpServer2 implements FilterContainer {
-  public static final Log LOG = LogFactory.getLog(HttpServer2.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
 
   public static final String HTTP_SCHEME = "http";
   public static final String HTTPS_SCHEME = "https";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
index 9ca5b92..fc64697 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
@@ -29,11 +29,11 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.Filter;
 
@@ -47,7 +47,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_S
 public class StaticUserWebFilter extends FilterInitializer {
   static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
 
-  private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StaticUserWebFilter.class);
 
   static class User implements Principal {
     private final String name;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
index d4514c6..519fcd7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
@@ -22,8 +22,6 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +34,8 @@ import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
 import org.apache.hadoop.util.bloom.Key;
 import org.apache.hadoop.util.hash.Hash;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_KEY;
@@ -52,7 +52,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOO
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class BloomMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomMapFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BloomMapFile.class);
   public static final String BLOOM_FILE_NAME = "bloom";
   public static final int HASH_COUNT = 5;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccaf0366/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
index 705678e..a2903f8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
@@ -22,11 +22,10 @@ import java.nio.ByteOrder;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import sun.misc.Unsafe;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import com.google.common.primitives.Longs;
 import com.google.common.primitives.UnsignedBytes;
 
@@ -36,7 +35,7 @@ import com.google.common.primitives.UnsignedBytes;
  * class to be able to compare arrays that start at non-zero offsets.
  */
 abstract class FastByteComparisons {
-  static final Log LOG = LogFactory.getLog(FastByteComparisons.class);
+  static final Logger LOG = LoggerFactory.getLogger(FastByteComparisons.class);
 
   /**
    * Lexicographically compare two byte arrays.

