Posted to commits@hbase.apache.org by zh...@apache.org on 2021/03/20 01:22:05 UTC

[hbase] branch master updated: HBASE-19577 Use log4j2 instead of log4j for logging (#1708)

This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new ba3610d  HBASE-19577 Use log4j2 instead of log4j for logging (#1708)
ba3610d is described below

commit ba3610d097edf985f77817ec4e65f6580594d47d
Author: Duo Zhang <zh...@apache.org>
AuthorDate: Sat Mar 20 09:21:25 2021 +0800

    HBASE-19577 Use log4j2 instead of log4j for logging (#1708)
    
    Signed-off-by: stack <st...@apache.org>
---
 bin/hbase                                          |   25 +-
 bin/hbase-daemon.sh                                |   16 +-
 bin/hbase.cmd                                      |   16 +-
 conf/log4j-hbtop.properties                        |   27 -
 conf/log4j.properties                              |  139 ---
 .../log4j2-hbtop.xml                               |   47 +-
 conf/log4j2.xml                                    |  103 ++
 hbase-archetypes/hbase-client-project/pom.xml      |   18 +-
 .../src/main/resources/log4j.properties            |  121 ---
 .../src/main/resources/log4j2.xml                  |  103 ++
 .../hbase-shaded-client-project/pom.xml            |   18 +-
 .../src/main/resources/log4j.properties            |  121 ---
 .../src/main/resources/log4j2.xml                  |  103 ++
 hbase-assembly/pom.xml                             |   16 +-
 hbase-assembly/src/main/assembly/client.xml        |   12 +-
 .../src/main/assembly/hadoop-three-compat.xml      |   12 +-
 hbase-asyncfs/pom.xml                              |   18 +-
 .../hadoop/hbase/io/asyncfs/AsyncFSTestBase.java   |    5 -
 hbase-backup/pom.xml                               |   18 +-
 hbase-balancer/pom.xml                             |   13 +-
 hbase-client/pom.xml                               |   13 +-
 .../hadoop/hbase/ipc/TestFailedServersLog.java     |   67 +-
 .../hbase/security/TestHBaseSaslRpcClient.java     |   14 +-
 hbase-common/pom.xml                               |   13 +-
 .../hadoop/hbase/logging/TestLog4jUtils.java       |   39 +-
 hbase-endpoint/pom.xml                             |   18 +-
 hbase-examples/pom.xml                             |   18 +-
 hbase-hadoop-compat/pom.xml                        |  324 +++---
 hbase-hbtop/pom.xml                                |   13 +-
 hbase-http/pom.xml                                 |   13 +-
 .../org/apache/hadoop/hbase/http/log/LogLevel.java |    4 +-
 .../apache/hadoop/hbase/util/LogMonitoring.java    |    8 +-
 .../apache/hadoop/hbase/http/log/TestLogLevel.java |  199 ++--
 hbase-it/pom.xml                                   |   18 +-
 hbase-logging/pom.xml                              |   28 +-
 .../hadoop/hbase/logging/InternalLog4jUtils.java   |   58 +-
 .../test/java/org/apache/log4j/FileAppender.java   |  288 ++++++
 hbase-logging/src/test/resources/log4j.properties  |   68 --
 hbase-logging/src/test/resources/log4j2.xml        |   45 +
 hbase-mapreduce/pom.xml                            |   18 +-
 .../org/apache/hadoop/hbase/util/LoadTestTool.java |   17 +-
 hbase-metrics-api/pom.xml                          |   13 +-
 hbase-metrics/pom.xml                              |   13 +-
 hbase-procedure/pom.xml                            |   13 +-
 hbase-replication/pom.xml                          |   13 +-
 hbase-rest/pom.xml                                 |   18 +-
 hbase-server/pom.xml                               |   18 +-
 .../apache/hadoop/hbase/HBaseTestingUtility.java   |    1 -
 .../TestAsyncTableBatchRetryImmediately.java       |   11 +-
 .../hbase/client/TestMultiRespectsLimits.java      |   10 +-
 .../apache/hadoop/hbase/ipc/TestProtoBufRpc.java   |    7 +-
 .../hbase/ipc/TestRpcServerTraceLogging.java       |   24 +-
 .../hbase/regionserver/TestMultiLogThreshold.java  |   64 +-
 .../TestRegionServerReportForDuty.java             |   34 +-
 .../compactions/PerfTestCompactionPolicies.java    |   11 +-
 .../apache/hadoop/hbase/tool/TestCanaryTool.java   |  171 ++--
 hbase-shaded/hbase-shaded-check-invariants/pom.xml |   16 +-
 .../hbase-shaded-client-byo-hadoop/pom.xml         |  230 ++---
 hbase-shaded/hbase-shaded-client/pom.xml           |  149 ++-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml        |  462 ++++-----
 .../hbase-shaded-testing-util-tester/pom.xml       |  142 +--
 hbase-shaded/hbase-shaded-testing-util/pom.xml     |  427 ++++----
 .../pom.xml                                        |   15 +-
 hbase-shaded/pom.xml                               | 1074 ++++++++++----------
 hbase-shell/pom.xml                                |   18 +-
 hbase-testing-util/pom.xml                         |  330 +++---
 hbase-thrift/pom.xml                               |   18 +-
 hbase-zookeeper/pom.xml                            |   13 +-
 pom.xml                                            |  139 ++-
 69 files changed, 3088 insertions(+), 2600 deletions(-)

diff --git a/bin/hbase b/bin/hbase
index d2307c5..601e2c1 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -301,10 +301,13 @@ else
   # make it easier to check for shaded/not later on.
   shaded_jar=""
 fi
+# here we will add slf4j-api, commons-logging, jul-to-slf4j, jcl-over-slf4j
+# to classpath, as they are all logging bridges. Only exclude log4j* so we
+# will not actually log anything out. Add it later if necessary
 for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
   if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
-     [ "${f}" != "htrace-core.jar$" ] && \
-     [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+     [[ "${f}" != "htrace-core.jar$" ]] && \
+     [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
     CLASSPATH="${CLASSPATH}:${f}"
   fi
 done
@@ -658,7 +661,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then
     for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
       if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
          [ "${f}" != "htrace-core.jar$" ] && \
-         [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+         [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
         echo -n ":${f}"
       fi
     done
@@ -775,7 +778,11 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
 HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
 HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
 HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
+# log4j2 does not support setting log level and appender at once, so we need to split HBASE_ROOT_LOGGER
+HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-INFO,console}
+array=(${HBASE_ROOT_LOGGER//,/ })
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.level=${array[0]}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.appender=${array[1]}"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
   HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
   export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
@@ -783,17 +790,19 @@ fi
 
 # Enable security logging on the master and regionserver only
 if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
-  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
+  HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,RFAS}
 else
-  HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
+  HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}
 fi
+array=(${HBASE_SECURITY_LOGGER//,/ })
+HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${array[0]}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${array[1]}"
 
 HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
 # by now if we're running a command it means we need logging
-for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do
   if [ -f "${f}" ]; then
     CLASSPATH="${CLASSPATH}:${f}"
-    break
   fi
 done
 
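Note on the bin/hbase change above: HBASE_ROOT_LOGGER and HBASE_SECURITY_LOGGER keep their old "LEVEL,APPENDER" form, but the script now splits each value into two system properties because the log4j2 configuration resolves the level and the appender separately. A minimal standalone sketch of that split, assuming the same "LEVEL,APPENDER" convention (the echo output is illustrative only, not part of the commit):

    #!/usr/bin/env bash
    # Illustrative sketch: reproduce the LEVEL,APPENDER split done in bin/hbase.
    HBASE_ROOT_LOGGER="${HBASE_ROOT_LOGGER:-INFO,console}"
    # Replace the comma with a space and let word splitting build the array.
    array=(${HBASE_ROOT_LOGGER//,/ })
    # First element is the level, second is the appender name used by log4j2.xml.
    echo "-Dhbase.root.logger.level=${array[0]}"
    echo "-Dhbase.root.logger.appender=${array[1]}"
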
diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh
index 11c13eb..6fafab0 100755
--- a/bin/hbase-daemon.sh
+++ b/bin/hbase-daemon.sh
@@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java
 export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME
 export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log
 
-if [ -z "${HBASE_ROOT_LOGGER}" ]; then
-export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"}
+if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then
+export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"}
 fi
 
-if [ -z "${HBASE_SECURITY_LOGGER}" ]; then
-export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"}
+if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then
+export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then
+export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then
+export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"}
 fi
 
 HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"}
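Note on bin/hbase-daemon.sh above: the combined HBASE_ROOT_LOGGER/HBASE_SECURITY_LOGGER defaults are replaced by four separate variables, each only seeded when not already set, so values exported beforehand win. A hedged usage sketch (the regionserver invocation is just an example command):

    # Illustrative only: pre-set the split variables introduced above,
    # then start a daemon as usual; the script keeps any value already exported.
    export HBASE_ROOT_LOGGER_LEVEL=DEBUG
    export HBASE_ROOT_LOGGER_APPENDER=RFA
    export HBASE_SECURITY_LOGGER_LEVEL=INFO
    export HBASE_SECURITY_LOGGER_APPENDER=RFAS
    ./bin/hbase-daemon.sh start regionserver
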
diff --git a/bin/hbase.cmd b/bin/hbase.cmd
index a927227..1fd39d2 100644
--- a/bin/hbase.cmd
+++ b/bin/hbase.cmd
@@ -329,7 +329,13 @@ set HBASE_OPTS=%HBASE_OPTS% -XX:OnOutOfMemoryError="taskkill /F /PID %p"
 if not defined HBASE_ROOT_LOGGER (
   set HBASE_ROOT_LOGGER=INFO,console
 )
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%"
+
+for /F "tokens=1,2 delims=," %%a in ("%HBASE_ROOT_LOGGER%") do (
+  set HBASE_ROOT_LOGGER_LEVEL=%%a
+  set HBASE_ROOT_LOGGER_APPENDER=%%b
+)
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger.level="%HBASE_ROOT_LOGGER_LEVEL% -Dhbase.root.logger.appender="%HBASE_ROOT_LOGGER_APPENDER% "
 
 if defined JAVA_LIBRARY_PATH (
   set HBASE_OPTS=%HBASE_OPTS% -Djava.library.path="%JAVA_LIBRARY_PATH%"
@@ -345,7 +351,13 @@ if not defined HBASE_SECURITY_LOGGER (
     set HBASE_SECURITY_LOGGER=INFO,DRFAS
   )
 )
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%"
+
+for /F "tokens=1,2 delims=," %%a in ("%HBASE_SECURITY_LOGGER%") do (
+  set HBASE_SECURITY_LOGGER_LEVEL=%%a
+  set HBASE_SECURITY_LOGGER_APPENDER=%%b
+)
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger.level="%HBASE_SECURITY_LOGGER_LEVEL% -Dhbase.security.logger.appender="%HBASE_SECURITY_LOGGER_APPENDER%"
 
 set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX%
 set java_arguments=%HEAP_SETTINGS% %HBASE_OPTS% -classpath "%CLASSPATH%" %CLASS% %hbase-command-arguments%
diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties
deleted file mode 100644
index 4d68d79..0000000
--- a/conf/log4j-hbtop.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=WARN,console
-log4j.threshold=WARN
-
-# console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# ZooKeeper will still put stuff at WARN
-log4j.logger.org.apache.zookeeper=ERROR
diff --git a/conf/log4j.properties b/conf/log4j.properties
deleted file mode 100644
index 2282fa5..0000000
--- a/conf/log4j.properties
+++ /dev/null
@@ -1,139 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-hbase.log.level=INFO
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
-log4j.appender.asyncconsole.target=System.err
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level}
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-
-# Disable request log by default, you can enable this by changing the appender
-log4j.category.http.requests=INFO,NullAppender
-log4j.additivity.http.requests=false
-# Replace the above with this configuration if you want an http access.log
-#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.accessRFA.File=/var/log/hbase/access.log
-#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.accessRFA.layout.ConversionPattern=%m%n
-#log4j.appender.accessRFA.MaxFileSize=200MB
-#log4j.appender.accessRFA.MaxBackupIndex=10
-# route http.requests to the accessRFA appender
-#log4j.logger.http.requests=INFO,accessRFA
-# disable http.requests.* entries going up to the root logger
-#log4j.additivity.http.requests=false
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/conf/log4j2-hbtop.xml
similarity index 51%
rename from hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
rename to conf/log4j2-hbtop.xml
index 939b453..de0fb57 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/conf/log4j2-hbtop.xml
@@ -1,3 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -15,32 +17,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
-
-import org.apache.yetus.audience.InterfaceAudience;
-
-/**
- * Logger class that buffers before trying to log to the specified console.
- */
-@InterfaceAudience.Private
-public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender {
-  private final org.apache.log4j.ConsoleAppender consoleAppender;
-
-  public AsyncConsoleAppender() {
-    super();
-    consoleAppender = new org.apache.log4j.ConsoleAppender(
-      new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
-    this.addAppender(consoleAppender);
-  }
-
-  public void setTarget(String value) {
-    consoleAppender.setTarget(value);
-  }
-
-  @Override
-  public void activateOptions() {
-    consoleAppender.activateOptions();
-    super.activateOptions();
-  }
-
-}
+-->
+<Configuration>
+  <Appenders>
+    <!-- Console appender -->
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="warn">
+      <AppenderRef ref="console" />
+    </Root>
+    <!-- ZooKeeper will still put stuff at WARN -->
+    <Logger name="org.apache.zookeeper" level="error" />
+  </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/conf/log4j2.xml b/conf/log4j2.xml
new file mode 100644
index 0000000..63dbeba
--- /dev/null
+++ b/conf/log4j2.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<Configuration>
+  <Appenders>
+    <!-- Console appender -->
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+    </Console>
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <TimeBasedTriggeringPolicy interval="1" />
+      </Policies>
+      <DefaultRolloverStrategy max="30" />
+    </RollingFile>
+    <!-- Rolling File Appender -->
+    <RollingFile name="RFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Security audit appender -->
+    <RollingFile name="RFAS"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Http Access Log RFA -->
+    <RollingFile name="AccessRFA"
+      fileName="/var/log/hbase/access.log"
+      filePattern="/var/log/hbase/access.log.%i">
+      <PatternLayout pattern="%m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="200MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10" />
+    </RollingFile>
+    <Null name="NullAppender" />
+  </Appenders>
+  <Loggers>
+    <Root level="${sys:hbase.root.logger.level:-info}">
+      <AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
+    </Root>
+    <Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
+      additivity="false">
+      <AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
+    </Logger>
+    <!-- Custom Logging levels -->
+    <!--
+    <Logger name="org.apache.zookeeper" level="debug"/>
+    <Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.META" level="debug"/>
+    Make these two classes below DEBUG to see more zk debug.
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
+    <Logger name="org.apache.hadoop.dfs" level="debug"/>
+    -->
+    <!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
+    <!-- Disable request log by default, you can enable this by changing the appender -->
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="NullAppender" />
+    </Logger>
+    <!--
+      Replace the above with this configuration if you want an http access.log
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="AccessRFA" />
+    </Logger>
+    -->
+  </Loggers>
+</Configuration>
\ No newline at end of file
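Note on conf/log4j2.xml above: the Root logger and SecurityLogger are wired to ${sys:hbase.root.logger.level}/${sys:hbase.root.logger.appender} and their security counterparts, with info/console defaults. A hedged example of steering them from the command line through bin/hbase (the shell command is only an illustration):

    # Illustrative only: bin/hbase turns "DEBUG,RFA" into the two -D flags
    # that the ${sys:...} lookups in conf/log4j2.xml pick up.
    HBASE_ROOT_LOGGER=DEBUG,RFA ./bin/hbase shell
    # Equivalent explicit form:
    #   -Dhbase.root.logger.level=DEBUG -Dhbase.root.logger.appender=RFA
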
diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml
index e8192ed..86217f6 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -54,13 +54,23 @@
       <artifactId>hbase-client</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
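Note on the archetype POM above: slf4j-log4j12 and log4j are replaced by the four log4j2 artifacts (log4j-api, log4j-core, log4j-slf4j-impl as the SLF4J binding, log4j-1.2-api as the log4j 1.x bridge), all at runtime scope. A sketch of checking what actually lands on a downstream classpath, using the standard Maven goal:

    # Standard Maven dependency listing, filtered to the new logging artifacts.
    mvn dependency:tree -Dincludes=org.apache.logging.log4j
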
diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
deleted file mode 100644
index 0b01e57..0000000
--- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=INFO
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=INFO
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
-# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..63dbeba
--- /dev/null
+++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<Configuration>
+  <Appenders>
+    <!-- Console appender -->
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+    </Console>
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <TimeBasedTriggeringPolicy interval="1" />
+      </Policies>
+      <DefaultRolloverStrategy max="30" />
+    </RollingFile>
+    <!-- Rolling File Appender -->
+    <RollingFile name="RFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Security audit appender -->
+    <RollingFile name="RFAS"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Http Access Log RFA -->
+    <RollingFile name="AccessRFA"
+      fileName="/var/log/hbase/access.log"
+      filePattern="/var/log/hbase/access.log.%i">
+      <PatternLayout pattern="%m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="200MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10" />
+    </RollingFile>
+    <Null name="NullAppender" />
+  </Appenders>
+  <Loggers>
+    <Root level="${sys:hbase.root.logger.level:-info}">
+      <AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
+    </Root>
+    <Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
+      additivity="false">
+      <AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
+    </Logger>
+    <!-- Custom Logging levels -->
+    <!--
+    <Logger name="org.apache.zookeeper" level="debug"/>
+    <Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.META" level="debug"/>
+    Make these two classes below DEBUG to see more zk debug.
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
+    <Logger name="org.apache.hadoop.dfs" level="debug"/>
+    -->
+    <!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
+    <!-- Disable request log by default, you can enable this by changing the appender -->
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="NullAppender" />
+    </Logger>
+    <!--
+      Replace the above with this configuration if you want an http access.log
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="AccessRFA" />
+    </Logger>
+    -->
+  </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml
index cd8dfaa..16276d2 100644
--- a/hbase-archetypes/hbase-shaded-client-project/pom.xml
+++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml
@@ -60,13 +60,23 @@
       <artifactId>hbase-shaded-client</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
deleted file mode 100644
index 0b01e57..0000000
--- a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=INFO
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=INFO
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
-# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..63dbeba
--- /dev/null
+++ b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<Configuration>
+  <Appenders>
+    <!-- Console appender -->
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+    </Console>
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <TimeBasedTriggeringPolicy interval="1" />
+      </Policies>
+      <DefaultRolloverStrategy max="30" />
+    </RollingFile>
+    <!-- Rolling File Appender -->
+    <RollingFile name="RFA"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Security audit appender -->
+    <RollingFile name="RFAS"
+      fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
+      filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="20" />
+    </RollingFile>
+    <!-- Http Access Log RFA -->
+    <RollingFile name="AccessRFA"
+      fileName="/var/log/hbase/access.log"
+      filePattern="/var/log/hbase/access.log.%i">
+      <PatternLayout pattern="%m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="200MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10" />
+    </RollingFile>
+    <Null name="NullAppender" />
+  </Appenders>
+  <Loggers>
+    <Root level="${sys:hbase.root.logger.level:-info}">
+      <AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
+    </Root>
+    <Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
+      additivity="false">
+      <AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
+    </Logger>
+    <!-- Custom Logging levels -->
+    <!--
+    <Logger name="org.apache.zookeeper" level="debug"/>
+    <Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.META" level="debug"/>
+    Make these two classes below DEBUG to see more zk debug.
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
+    <Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
+    <Logger name="org.apache.hadoop.dfs" level="debug"/>
+    -->
+    <!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
+    <!-- Disable request log by default, you can enable this by changing the appender -->
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="NullAppender" />
+    </Logger>
+    <!--
+      Replace the above with this configuration if you want an http access.log
+    <Logger name="http.requests" level="info" additivity="false">
+      <AppenderRef ref="AccessRFA" />
+    </Logger>
+    -->
+  </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 8053517..160293c 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -332,12 +332,20 @@
       <artifactId>jul-to-slf4j</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
     </dependency>
   </dependencies>
 </project>
diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml
index bd65cb4..62828fa 100644
--- a/hbase-assembly/src/main/assembly/client.xml
+++ b/hbase-assembly/src/main/assembly/client.xml
@@ -61,10 +61,8 @@
               <exclude>org.apache.htrace:htrace-core4</exclude>
               <exclude>org.apache.htrace:htrace-core</exclude>
               <exclude>org.apache.yetus:audience-annotations</exclude>
-              <exclude>org.slf4j:slf4j-api</exclude>
-              <exclude>org.slf4j:jcl-over-slf4j</exclude>
-              <exclude>org.slf4j:jul-to-slf4j</exclude>
-              <exclude>org.slf4j:slf4j-log4j12</exclude>
+              <exclude>org.slf4j:*</exclude>
+              <exclude>org.apache.logging.log4j:*</exclude>
             </excludes>
           </dependencySet>
         </dependencySets>
@@ -149,10 +147,8 @@
         <include>org.apache.htrace:htrace-core4</include>
         <include>org.apache.htrace:htrace-core</include>
         <include>org.apache.yetus:audience-annotations</include>
-        <include>org.slf4j:slf4j-api</include>
-        <include>org.slf4j:jcl-over-slf4j</include>
-        <include>org.slf4j:jul-to-slf4j</include>
-        <include>org.slf4j:slf4j-log4j12</include>
+        <include>org.slf4j:*</include>
+        <include>org.apache.logging.log4j:*</include>
       </includes>
     </dependencySet>
   </dependencySets>
diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
index 1c172e9..ab2a4c5 100644
--- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
@@ -50,11 +50,9 @@
         <include>org.apache.hbase:hbase-metrics</include>
         <include>org.apache.hbase:hbase-metrics-api</include>
         <include>org.apache.hbase:hbase-procedure</include>
-        <include>org.apache.hbase:hbase-protocol</include>
         <include>org.apache.hbase:hbase-protocol-shaded</include>
         <include>org.apache.hbase:hbase-replication</include>
         <include>org.apache.hbase:hbase-rest</include>
-        <include>org.apache.hbase:hbase-rsgroup</include>
         <include>org.apache.hbase:hbase-server</include>
         <include>org.apache.hbase:hbase-shell</include>
         <include>org.apache.hbase:hbase-testing-util</include>
@@ -111,8 +109,8 @@
               <exclude>org.apache.htrace:htrace-core4</exclude>
               <exclude>org.apache.htrace:htrace-core</exclude>
               <exclude>org.apache.yetus:audience-annotations</exclude>
-              <exclude>org.slf4j:slf4j-api</exclude>
-              <exclude>org.slf4j:slf4j-log4j12</exclude>
+              <exclude>org.slf4j:*</exclude>
+              <exclude>org.apache.logging.log4j:*</exclude>
             </excludes>
           </dependencySet>
         </dependencySets>
@@ -209,10 +207,8 @@
         <include>org.apache.htrace:htrace-core4</include>
         <include>org.apache.htrace:htrace-core</include>
         <include>org.apache.yetus:audience-annotations</include>
-        <include>org.slf4j:slf4j-api</include>
-        <include>org.slf4j:jcl-over-slf4j</include>
-        <include>org.slf4j:jul-to-slf4j</include>
-        <include>org.slf4j:slf4j-log4j12</include>
+        <include>org.slf4j:*</include>
+        <include>org.apache.logging.log4j:*</include>
       </includes>
     </dependencySet>
     <dependencySet>
diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml
index 3d45e4c..1efc980 100644
--- a/hbase-asyncfs/pom.xml
+++ b/hbase-asyncfs/pom.xml
@@ -149,13 +149,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
index 9b276ac..12ba93fb 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
@@ -98,11 +98,6 @@ public abstract class AsyncFSTestBase {
     createDirsAndSetProperties();
 
     Configuration conf = UTIL.getConfiguration();
-    // Error level to skip some warnings specific to the minicluster. See HBASE-4709
-    org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class)
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class)
-      .setLevel(org.apache.log4j.Level.ERROR);
 
     TraceUtil.initTracer(conf);
     CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build();
diff --git a/hbase-backup/pom.xml b/hbase-backup/pom.xml
index 972ea8d..a1fedc3 100644
--- a/hbase-backup/pom.xml
+++ b/hbase-backup/pom.xml
@@ -148,13 +148,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/hbase-balancer/pom.xml b/hbase-balancer/pom.xml
index 2fcbc2b..c321af5 100644
--- a/hbase-balancer/pom.xml
+++ b/hbase-balancer/pom.xml
@@ -108,13 +108,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 63e81d9..f9982d6 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -155,13 +155,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
index fa44022..dc94e91 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
@@ -17,73 +17,82 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Captor;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
-@RunWith(MockitoJUnitRunner.class)
 @Category({ ClientTests.class, SmallTests.class })
 public class TestFailedServersLog {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestFailedServersLog.class);
+    HBaseClassTestRule.forClass(TestFailedServersLog.class);
 
   static final int TEST_PORT = 9999;
-  private Address addr;
 
-  @Mock
-  private Appender mockAppender;
+  private Address addr;
 
-  @Captor
-  private ArgumentCaptor captorLoggingEvent;
+  private org.apache.logging.log4j.core.Appender mockAppender;
 
   @Before
   public void setup() {
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(FailedServers.class)).addAppender(mockAppender);
+
   }
 
   @After
   public void teardown() {
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(FailedServers.class)).removeAppender(mockAppender);
   }
 
   @Test
   public void testAddToFailedServersLogging() {
-    Throwable nullException = new NullPointerException();
+    AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>();
+    AtomicReference<String> msg = new AtomicReference<String>();
+    doAnswer(new Answer<Void>() {
 
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        level.set(logEvent.getLevel());
+        msg.set(logEvent.getMessage().getFormattedMessage());
+        return null;
+      }
+    }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+
+    Throwable nullException = new NullPointerException();
     FailedServers fs = new FailedServers(new Configuration());
     addr = Address.fromParts("localhost", TEST_PORT);
 
     fs.addToFailedServers(addr, nullException);
 
-    Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture());
-    LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue();
-    assertThat(loggingEvent.getLevel(), is(Level.DEBUG));
-    assertEquals("Added failed server with address " + addr.toString() + " to list caused by "
-        + nullException.toString(),
-      loggingEvent.getRenderedMessage());
+    verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get());
+    assertEquals("Added failed server with address " + addr.toString() + " to list caused by " +
+      nullException.toString(), msg.get());
   }
-
 }
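The rewritten test above shows the log4j2 pattern for asserting on log output: mock a core Appender, attach it to the target Logger, and inspect the LogEvents it receives. A minimal helper distilled from that pattern is sketched below; the class and method names are illustrative only and do not appear in the patch.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;

public final class LogCaptureHelper {

  private LogCaptureHelper() {
  }

  /** Attach a mocked log4j2 Appender to clazz's logger and collect every LogEvent into sink. */
  public static Appender attach(Class<?> clazz, List<LogEvent> sink) {
    Appender appender = mock(Appender.class);
    when(appender.getName()).thenReturn("capture");
    when(appender.isStarted()).thenReturn(true);
    doAnswer(invocation -> {
      sink.add(invocation.getArgument(0, LogEvent.class));
      return null;
    }).when(appender).append(any(LogEvent.class));
    ((Logger) LogManager.getLogger(clazz)).addAppender(appender);
    return appender;
  }

  /** Detach the appender added by attach(). */
  public static void detach(Class<?> clazz, Appender appender) {
    ((Logger) LogManager.getLogger(clazz)).removeAppender(appender);
  }
}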
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 2252c21..538a9b9 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -30,7 +30,6 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.Map;
-
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.NameCallback;
 import javax.security.auth.callback.PasswordCallback;
@@ -39,7 +38,6 @@ import javax.security.auth.callback.UnsupportedCallbackException;
 import javax.security.sasl.RealmCallback;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslClient;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -55,16 +53,15 @@ import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.Assert;
-import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.ExpectedException;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Strings;
 
@@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient {
   static final String DEFAULT_USER_NAME = "principal";
   static final String DEFAULT_USER_PASSWORD = "password";
 
-  private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
 
 
   @Rule
   public ExpectedException exception = ExpectedException.none();
 
-  @BeforeClass
-  public static void before() {
-    Logger.getRootLogger().setLevel(Level.DEBUG);
-  }
-
   @Test
   public void testSaslClientUsesGivenRpcProtection() throws Exception {
     Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,
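The removed @BeforeClass hook forced the root logger to DEBUG through the log4j 1.x API. If a test still needed that behaviour after this change, the log4j2 equivalent is a one-liner via Configurator; the wrapper class below is a hypothetical sketch, not part of the patch.

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;

public final class ForceRootDebug {

  private ForceRootDebug() {
  }

  /** Raise the root logger to DEBUG, mirroring the removed @BeforeClass hook. */
  public static void apply() {
    Configurator.setRootLevel(Level.DEBUG);
  }
}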
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 8b91541..fca3c5b 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -232,13 +232,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
index 89931de..806107b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
@@ -24,9 +24,6 @@ import java.io.IOException;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -44,23 +41,29 @@ public class TestLog4jUtils {
 
   @Test
   public void test() {
-    Logger zk = LogManager.getLogger("org.apache.zookeeper");
-    Level zkLevel = zk.getEffectiveLevel();
-    Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
-    Level hbaseZkLevel = hbaseZk.getEffectiveLevel();
-    Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client");
-    Level clientLevel = client.getEffectiveLevel();
+    org.apache.logging.log4j.Logger zk =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper");
+    org.apache.logging.log4j.Level zkLevel = zk.getLevel();
+    org.apache.logging.log4j.Logger hbaseZk =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
+    org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel();
+    org.apache.logging.log4j.Logger client =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client");
+    org.apache.logging.log4j.Level clientLevel = client.getLevel();
     Log4jUtils.disableZkAndClientLoggers();
-    assertEquals(Level.OFF, zk.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName()));
-    assertEquals(Level.OFF, hbaseZk.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
-    assertEquals(Level.OFF, client.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(zk.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(client.getName()));
     // restore the level
-    zk.setLevel(zkLevel);
-    hbaseZk.setLevel(hbaseZkLevel);
-    client.setLevel(clientLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel);
   }
 
   @Test
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 09558d4..e5ca742 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -194,13 +194,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 0328eee..91d5894 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -272,13 +272,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index a687de8..dec129d 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <!--
     /**
      * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,23 +21,23 @@
      * limitations under the License.
      */
     -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-build-configuration</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>../hbase-build-configuration</relativePath>
-    </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-build-configuration</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
 
-    <artifactId>hbase-hadoop-compat</artifactId>
-    <name>Apache HBase - Hadoop Compatibility</name>
-    <description>
+  <artifactId>hbase-hadoop-compat</artifactId>
+  <name>Apache HBase - Hadoop Compatibility</name>
+  <description>
         Interfaces to be implemented in order to smooth
         over hadoop version differences
     </description>
 
-    <build>
-      <plugins>
+  <build>
+    <plugins>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>
@@ -44,156 +46,166 @@
         </configuration>
       </plugin>
         <!-- Make a jar and put the sources in the jar -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-source-plugin</artifactId>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-checkstyle-plugin</artifactId>
-          <configuration>
-            <failOnViolation>true</failOnViolation>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>net.revelc.code</groupId>
-          <artifactId>warbucks-maven-plugin</artifactId>
-        </plugin>
-      </plugins>
-    </build>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <failOnViolation>true</failOnViolation>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
 
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-annotations</artifactId>
-        <type>test-jar</type>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-logging</artifactId>
-        <type>test-jar</type>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-common</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-common</artifactId>
-        <type>test-jar</type>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-metrics</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-metrics-api</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hbase.thirdparty</groupId>
-        <artifactId>hbase-shaded-miscellaneous</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <exclusions>
-          <exclusion>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-common</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-api</artifactId>
-      </dependency>
-      <dependency>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-logging</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-metrics</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-metrics-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-miscellaneous</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
       <!--
         a missing transitive dependency on JDK9+ (obsoleted by Hadoop-3.3.0+, HADOOP-15775)
       -->
-        <groupId>javax.activation</groupId>
-        <artifactId>javax.activation-api</artifactId>
-        <scope>runtime</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.commons</groupId>
-        <artifactId>commons-lang3</artifactId>
-      </dependency>
-      <dependency>
-        <groupId>junit</groupId>
-        <artifactId>junit</artifactId>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>jcl-over-slf4j</artifactId>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>jul-to-slf4j</artifactId>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <scope>test</scope>
-      </dependency>
-    </dependencies>
+      <groupId>javax.activation</groupId>
+      <artifactId>javax.activation-api</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jcl-over-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 
-    <profiles>
+  <profiles>
         <!-- Skip the tests in this module -->
-        <profile>
-            <id>skipHadoopCompatTests</id>
-            <activation>
-                <property>
-                    <name>skipHadoopCompatTests</name>
-                </property>
-            </activation>
-            <properties>
-                <surefire.skipFirstPart>true</surefire.skipFirstPart>
-                <surefire.skipSecondPart>true</surefire.skipSecondPart>
-            </properties>
-        </profile>
-      <profile>
-        <id>eclipse-specific</id>
-        <activation>
-          <property>
-            <name>m2e.version</name>
-          </property>
-        </activation>
-        <build>
-          <pluginManagement>
-            <plugins>
+    <profile>
+      <id>skipHadoopCompatTests</id>
+      <activation>
+        <property>
+          <name>skipHadoopCompatTests</name>
+        </property>
+      </activation>
+      <properties>
+        <surefire.skipFirstPart>true</surefire.skipFirstPart>
+        <surefire.skipSecondPart>true</surefire.skipSecondPart>
+      </properties>
+    </profile>
+    <profile>
+      <id>eclipse-specific</id>
+      <activation>
+        <property>
+          <name>m2e.version</name>
+        </property>
+      </activation>
+      <build>
+        <pluginManagement>
+          <plugins>
               <!--This plugin's configuration is used to store Eclipse m2e settings
                    only. It has no influence on the Maven build itself.-->
-              <plugin>
-                <groupId>org.eclipse.m2e</groupId>
-                <artifactId>lifecycle-mapping</artifactId>
-                <configuration>
-                  <lifecycleMappingMetadata>
-                    <pluginExecutions>
-                    </pluginExecutions>
-                  </lifecycleMappingMetadata>
-                </configuration>
-              </plugin>
-            </plugins>
-          </pluginManagement>
-        </build>
-      </profile>
-    </profiles>
+            <plugin>
+              <groupId>org.eclipse.m2e</groupId>
+              <artifactId>lifecycle-mapping</artifactId>
+              <configuration>
+                <lifecycleMappingMetadata>
+                  <pluginExecutions>
+                  </pluginExecutions>
+                </lifecycleMappingMetadata>
+              </configuration>
+            </plugin>
+          </plugins>
+        </pluginManagement>
+      </build>
+    </profile>
+  </profiles>
 
 </project>
diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml
index 2a1fd38..07542e0 100644
--- a/hbase-hbtop/pom.xml
+++ b/hbase-hbtop/pom.xml
@@ -92,13 +92,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index 51ce06c..8e3251d 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -241,13 +241,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index 1fcfa13..91b2615 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -302,8 +301,7 @@ public final class LogLevel {
   /**
    * A servlet implementation
    */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  @InterfaceStability.Unstable
+  @InterfaceAudience.Private
   public static class Servlet extends HttpServlet {
     private static final long serialVersionUID = 1L;
 
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
index 70ce5ec..92dc20d 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
@@ -31,8 +31,7 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
- * Utility functions for reading the log4j logs that are
- * being written by HBase.
+ * Utility functions for reading the log4j logs that are being written by HBase.
  */
 @InterfaceAudience.Private
 public abstract class LogMonitoring {
@@ -54,13 +53,12 @@ public abstract class LogMonitoring {
     }
   }
 
-  private static void dumpTailOfLog(File f, PrintWriter out, long tailKb)
-      throws IOException {
+  private static void dumpTailOfLog(File f, PrintWriter out, long tailKb) throws IOException {
     FileInputStream fis = new FileInputStream(f);
     BufferedReader r = null;
     try {
       FileChannel channel = fis.getChannel();
-      channel.position(Math.max(0, channel.size() - tailKb*1024));
+      channel.position(Math.max(0, channel.size() - tailKb * 1024));
       r = new BufferedReader(new InputStreamReader(fis));
       r.readLine(); // skip the first partial line
       String line;
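For reference, the tail-reading logic being reformatted here positions the FileChannel at most tailKb kilobytes before the end of the file and drops the first, likely partial, line before printing the rest. A standalone sketch of the same idea, under those assumptions and with an illustrative class name:

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.channels.FileChannel;

public final class TailFile {

  private TailFile() {
  }

  /** Print roughly the last tailKb kilobytes of f to out. */
  public static void tail(File f, PrintWriter out, long tailKb) throws IOException {
    try (FileInputStream fis = new FileInputStream(f);
      BufferedReader r = new BufferedReader(new InputStreamReader(fis))) {
      FileChannel channel = fis.getChannel();
      // Seek to at most tailKb kilobytes before the end of the file.
      channel.position(Math.max(0, channel.size() - tailKb * 1024));
      r.readLine(); // drop the first, likely partial, line
      String line;
      while ((line = r.readLine()) != null) {
        out.println(line);
      }
    }
  }
}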
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
index 2c5d0c4..4b8bb6b 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+
 import java.io.File;
 import java.net.BindException;
 import java.net.SocketException;
@@ -51,9 +52,6 @@ import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -63,11 +61,11 @@ import org.junit.experimental.categories.Category;
 /**
  * Test LogLevel.
  */
-@Category({MiscTests.class, SmallTests.class})
+@Category({ MiscTests.class, SmallTests.class })
 public class TestLogLevel {
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestLogLevel.class);
+    HBaseClassTestRule.forClass(TestLogLevel.class);
 
   private static String keystoresDir;
   private static String sslConfDir;
@@ -75,9 +73,10 @@ public class TestLogLevel {
   private static Configuration clientConf;
   private static Configuration sslConf;
   private static final String logName = TestLogLevel.class.getName();
-  private static final Logger log = LogManager.getLogger(logName);
+  private static final org.apache.logging.log4j.Logger log =
+    org.apache.logging.log4j.LogManager.getLogger(logName);
   private final static String PRINCIPAL = "loglevel.principal";
-  private final static String KEYTAB  = "loglevel.keytab";
+  private final static String KEYTAB = "loglevel.keytab";
 
   private static MiniKdc kdc;
 
@@ -106,8 +105,7 @@ public class TestLogLevel {
   }
 
   /**
-   * Sets up {@link MiniKdc} for testing security.
-   * Copied from HBaseTestingUtility#setupMiniKdc().
+   * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc().
    */
   static private MiniKdc setupMiniKdc() throws Exception {
     Properties conf = MiniKdc.createConf();
@@ -125,7 +123,7 @@ public class TestLogLevel {
         kdc = new MiniKdc(conf, dir);
         kdc.start();
       } catch (BindException e) {
-        FileUtils.deleteDirectory(dir);  // clean directory
+        FileUtils.deleteDirectory(dir); // clean directory
         numTries++;
         if (numTries == 3) {
           log.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
@@ -151,15 +149,15 @@ public class TestLogLevel {
   }
 
   /**
-   * Get the SSL configuration.
-   * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop.
+   * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in
+   * Hadoop.
    * @return {@link Configuration} instance with ssl configs loaded.
    * @param conf to pull client/server SSL settings filename from
    */
-  private static Configuration getSslConfig(Configuration conf){
+  private static Configuration getSslConfig(Configuration conf) {
     Configuration sslConf = new Configuration(false);
     String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY);
-    String sslClientConfFile =  conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
+    String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
     sslConf.addResource(sslServerConfFile);
     sslConf.addResource(sslClientConfFile);
     sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile);
@@ -184,36 +182,29 @@ public class TestLogLevel {
   public void testCommandOptions() throws Exception {
     final String className = this.getClass().getName();
 
-    assertFalse(validateCommand(new String[] {"-foo" }));
+    assertFalse(validateCommand(new String[] { "-foo" }));
     // fail due to insufficient number of arguments
     assertFalse(validateCommand(new String[] {}));
-    assertFalse(validateCommand(new String[] {"-getlevel" }));
-    assertFalse(validateCommand(new String[] {"-setlevel" }));
-    assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" }));
+    assertFalse(validateCommand(new String[] { "-getlevel" }));
+    assertFalse(validateCommand(new String[] { "-setlevel" }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" }));
 
     // valid command arguments
-    assertTrue(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className }));
-    assertTrue(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
-    assertTrue(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className }));
-    assertTrue(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
+    assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+    assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
+    assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+    assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
 
     // fail due to the extra argument
-    assertFalse(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className, "blah" }));
-    assertFalse(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
-    assertFalse(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080",
-          className }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" }));
+    assertFalse(
+      validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel",
+      "foo.bar:8080", className }));
   }
 
   /**
    * Check to see if a command can be accepted.
-   *
    * @param args a String array of arguments
    * @return true if the command can be accepted, false if not.
    */
@@ -232,40 +223,32 @@ public class TestLogLevel {
   }
 
   /**
-   * Creates and starts a Jetty server binding at an ephemeral port to run
-   * LogLevel servlet.
+   * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet.
    * @param protocol "http" or "https"
    * @param isSpnego true if SPNEGO is enabled
    * @return a created HttpServer object
    * @throws Exception if unable to create or start a Jetty server
    */
-  private HttpServer createServer(String protocol, boolean isSpnego)
-      throws Exception {
-    HttpServer.Builder builder = new HttpServer.Builder()
-        .setName("..")
-        .addEndpoint(new URI(protocol + "://localhost:0"))
-        .setFindPort(true)
-        .setConf(serverConf);
+  private HttpServer createServer(String protocol, boolean isSpnego) throws Exception {
+    HttpServer.Builder builder = new HttpServer.Builder().setName("..")
+      .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf);
     if (isSpnego) {
       // Set up server Kerberos credentials.
       // Since the server may fall back to simple authentication,
       // use ACL to make sure the connection is Kerberos/SPNEGO authenticated.
-      builder.setSecurityEnabled(true)
-          .setUsernameConfKey(PRINCIPAL)
-          .setKeytabConfKey(KEYTAB)
-          .setACL(new AccessControlList("client"));
+      builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB)
+        .setACL(new AccessControlList("client"));
     }
 
     // if using HTTPS, configure keystore/truststore properties.
     if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
-      builder = builder.
-          keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
-          .keyStore(sslConf.get("ssl.server.keystore.location"),
-              sslConf.get("ssl.server.keystore.password"),
-              sslConf.get("ssl.server.keystore.type", "jks"))
-          .trustStore(sslConf.get("ssl.server.truststore.location"),
-              sslConf.get("ssl.server.truststore.password"),
-              sslConf.get("ssl.server.truststore.type", "jks"));
+      builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+          sslConf.get("ssl.server.keystore.password"),
+          sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+          sslConf.get("ssl.server.truststore.password"),
+          sslConf.get("ssl.server.truststore.type", "jks"));
     }
 
     HttpServer server = builder.build();
@@ -274,31 +257,29 @@ public class TestLogLevel {
   }
 
   private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
-      final boolean isSpnego)
-      throws Exception {
-    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, Level.DEBUG.toString());
+    final boolean isSpnego) throws Exception {
+    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego,
+      org.apache.logging.log4j.Level.DEBUG.toString());
   }
 
   /**
    * Run both client and server using the given protocol.
-   *
    * @param bindProtocol specify either http or https for server
    * @param connectProtocol specify either http or https for client
    * @param isSpnego true if SPNEGO is enabled
   * @throws Exception if client can't access server.
    */
   private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
-      final boolean isSpnego, final String newLevel)
-      throws Exception {
+    final boolean isSpnego, final String newLevel) throws Exception {
     if (!LogLevel.isValidProtocol(bindProtocol)) {
       throw new Exception("Invalid server protocol " + bindProtocol);
     }
     if (!LogLevel.isValidProtocol(connectProtocol)) {
       throw new Exception("Invalid client protocol " + connectProtocol);
     }
-    Level oldLevel = log.getEffectiveLevel();
+    org.apache.logging.log4j.Level oldLevel = log.getLevel();
     assertNotEquals("Get default Log Level which shouldn't be ERROR.",
-        Level.ERROR, oldLevel);
+      org.apache.logging.log4j.Level.ERROR, oldLevel);
 
     // configs needed for SPNEGO at server side
     if (isSpnego) {
@@ -319,8 +300,8 @@ public class TestLogLevel {
 
     String keytabFilePath = keyTabFile.getAbsolutePath();
 
-    UserGroupInformation clientUGI = UserGroupInformation.
-        loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
+    UserGroupInformation clientUGI =
+      UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
     try {
       clientUGI.doAs((PrivilegedExceptionAction<Void>) () -> {
         // client command line
@@ -334,44 +315,38 @@ public class TestLogLevel {
     }
 
     // restore log level
-    GenericTestUtils.setLogLevel(log, oldLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel);
   }
 
   /**
-   * Run LogLevel command line to start a client to get log level of this test
-   * class.
-   *
+   * Run LogLevel command line to start a client to get log level of this test class.
    * @param protocol specify either http or https
    * @param authority daemon's web UI address
    * @throws Exception if unable to connect
    */
   private void getLevel(String protocol, String authority) throws Exception {
-    String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol};
+    String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol };
     CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
     cli.run(getLevelArgs);
   }
 
   /**
-   * Run LogLevel command line to start a client to set log level of this test
-   * class to debug.
-   *
+   * Run LogLevel command line to start a client to set log level of this test class to debug.
    * @param protocol specify either http or https
    * @param authority daemon's web UI address
    * @throws Exception if unable to run or log level does not change as expected
    */
-  private void setLevel(String protocol, String authority, String newLevel)
-      throws Exception {
-    String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol};
+  private void setLevel(String protocol, String authority, String newLevel) throws Exception {
+    String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol };
     CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
     cli.run(setLevelArgs);
 
     assertEquals("new level not equal to expected: ", newLevel.toUpperCase(),
-        log.getEffectiveLevel().toString());
+      log.getLevel().toString());
   }
 
   /**
    * Test setting log level to "Info".
-   *
    * @throws Exception if client can't set log level to INFO.
    */
   @Test
@@ -381,7 +356,6 @@ public class TestLogLevel {
 
   /**
    * Test setting log level to "Error".
-   *
    * @throws Exception if client can't set log level to ERROR.
    */
   @Test
@@ -391,18 +365,15 @@ public class TestLogLevel {
 
   /**
    * Server runs HTTP, no SPNEGO.
-   *
-   * @throws Exception if http client can't access http server,
-   *   or http client can access https server.
+   * @throws Exception if http client can't access http server, or http client can access https
+   *           server.
    */
   @Test
   public void testLogLevelByHttp() throws Exception {
     testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
-          false);
-      fail("An HTTPS Client should not have succeeded in connecting to a " +
-          "HTTP server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false);
+      fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
     } catch (SSLException e) {
       exceptionShouldContains("Unrecognized SSL message", e);
     }
@@ -410,18 +381,15 @@ public class TestLogLevel {
 
   /**
    * Server runs HTTP + SPNEGO.
-   *
-   * @throws Exception if http client can't access http server,
-   *   or http client can access https server.
+   * @throws Exception if http client can't access http server, or http client can access https
+   *           server.
    */
   @Test
   public void testLogLevelByHttpWithSpnego() throws Exception {
     testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
-          true);
-      fail("An HTTPS Client should not have succeeded in connecting to a " +
-          "HTTP server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true);
+      fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
     } catch (SSLException e) {
       exceptionShouldContains("Unrecognized SSL message", e);
     }
@@ -429,19 +397,15 @@ public class TestLogLevel {
 
   /**
    * Server runs HTTPS, no SPNEGO.
-   *
-   * @throws Exception if https client can't access https server,
-   *   or https client can access http server.
+   * @throws Exception if https client can't access https server, or https client can access http
+   *           server.
    */
   @Test
   public void testLogLevelByHttps() throws Exception {
-    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
-        false);
+    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
-          false);
-      fail("An HTTP Client should not have succeeded in connecting to a " +
-          "HTTPS server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false);
+      fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
     } catch (SocketException e) {
       exceptionShouldContains("Unexpected end of file from server", e);
     }
@@ -449,32 +413,27 @@ public class TestLogLevel {
 
   /**
    * Server runs HTTPS + SPNEGO.
-   *
-   * @throws Exception if https client can't access https server,
-   *   or https client can access http server.
+   * @throws Exception if https client can't access https server, or https client can access http
+   *           server.
    */
   @Test
   public void testLogLevelByHttpsWithSpnego() throws Exception {
-    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
-        true);
+    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
-          true);
-      fail("An HTTP Client should not have succeeded in connecting to a " +
-          "HTTPS server");
-    }  catch (SocketException e) {
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true);
+      fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
+    } catch (SocketException e) {
       exceptionShouldContains("Unexpected end of file from server", e);
     }
   }
 
   /**
-   * Assert that a throwable or one of its causes should contain the substr in its message.
-   *
-   * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util
-   * method which asserts t.toString() contains the substr. As the original throwable may have been
-   * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes.
-   * After stop supporting Hadoop2, this method can be removed and assertion in tests can use
-   * t.getCause() directly, similar to HADOOP-15280.
+   * Assert that a throwable or one of its causes should contain the substr in its message. Ideally
+   * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method
+   * which asserts t.toString() contains the substr. As the original throwable may have been wrapped
+   * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After we
+   * stop supporting Hadoop2, this method can be removed and assertions in tests can use
+   * t.getCause() directly, similar to HADOOP-15280.
    */
   private static void exceptionShouldContains(String substr, Throwable throwable) {
     Throwable t = throwable;
@@ -486,6 +445,6 @@ public class TestLogLevel {
       t = t.getCause();
     }
     throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" +
-        StringUtils.stringifyException(throwable), throwable);
+      StringUtils.stringifyException(throwable), throwable);
   }
 }
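The getLevel/setLevel helpers above drive the LogLevel CLI with the -getlevel, -setlevel and -protocol arguments. A hypothetical sketch of the same invocation outside the test is shown below; it is placed in the same package so the nested CLI class resolves, and the authority value is an assumption rather than something taken from the patch.

package org.apache.hadoop.hbase.http.log;

import org.apache.hadoop.conf.Configuration;

public final class LogLevelCliExample {

  private LogLevelCliExample() {
  }

  public static void main(String[] args) throws Exception {
    String authority = "localhost:16010"; // assumed daemon web UI address
    String logger = "org.apache.hadoop.hbase";
    LogLevel.CLI cli = new LogLevel.CLI(new Configuration());
    // Query the current level, then raise it to DEBUG over plain HTTP.
    cli.run(new String[] { "-getlevel", authority, logger, "-protocol", "http" });
    cli.run(new String[] { "-setlevel", authority, logger, "DEBUG", "-protocol", "http" });
  }
}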
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index f508ffd..83c5874 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -249,13 +249,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml
index d48ffca..c1409b3 100644
--- a/hbase-logging/pom.xml
+++ b/hbase-logging/pom.xml
@@ -38,7 +38,7 @@
       <testResource>
         <directory>src/test/resources</directory>
         <includes>
-          <include>log4j.properties</include>
+          <include>log4j2.xml</include>
         </includes>
       </testResource>
     </testResources>
@@ -80,13 +80,33 @@
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <artifactId>jcl-over-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
index 28d29bf..b0711d7 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
+++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
@@ -19,16 +19,15 @@ package org.apache.hadoop.hbase.logging;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.Enumeration;
 import java.util.HashSet;
 import java.util.Set;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
- * The actual class for operating on log4j.
+ * The actual class for operating on log4j2.
  * <p/>
  * This class will depend on log4j directly, so callers should not use this class directly to avoid
- * introducing log4j dependencies to downstream users. Please call the methods in
+ * introducing log4j2 dependencies to downstream users. Please call the methods in
  * {@link Log4jUtils}, as they will call the methods here through reflection.
  */
 @InterfaceAudience.Private
@@ -38,32 +37,53 @@ final class InternalLog4jUtils {
   }
 
   static void setLogLevel(String loggerName, String levelName) {
-    org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
-    org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase());
+    org.apache.logging.log4j.Level level =
+      org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase());
     if (!level.toString().equalsIgnoreCase(levelName)) {
       throw new IllegalArgumentException("Unsupported log level " + levelName);
     }
-    logger.setLevel(level);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level);
   }
 
   static String getEffectiveLevel(String loggerName) {
-    org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
-    return logger.getEffectiveLevel().toString();
+    org.apache.logging.log4j.Logger logger =
+      org.apache.logging.log4j.LogManager.getLogger(loggerName);
+    return logger.getLevel().name();
   }
 
   static Set<File> getActiveLogFiles() throws IOException {
     Set<File> ret = new HashSet<>();
-    org.apache.log4j.Appender a;
-    @SuppressWarnings("unchecked")
-    Enumeration<org.apache.log4j.Appender> e =
-      org.apache.log4j.Logger.getRootLogger().getAllAppenders();
-    while (e.hasMoreElements()) {
-      a = e.nextElement();
-      if (a instanceof org.apache.log4j.FileAppender) {
-        org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a;
-        String filename = fa.getFile();
-        ret.add(new File(filename));
-      }
+    org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+    if (!(logger instanceof org.apache.logging.log4j.core.Logger)) {
+      return ret;
+    }
+    org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
+    for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) {
+      if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      } else
+        if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) {
+          String fileName =
+            ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender)
+              .getFileName();
+          ret.add(new File(fileName));
+        } else
+          if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) {
+            String fileName =
+              ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender)
+                .getFileName();
+            ret.add(new File(fileName));
+          }
     }
     return ret;
   }
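As the class comment notes, downstream code is expected to go through Log4jUtils, which reaches these methods via reflection. A hypothetical usage sketch follows, assuming Log4jUtils mirrors the method names shown here; of these, only getEffectiveLevel is confirmed elsewhere in this patch.

import java.io.File;
import java.util.Set;
import org.apache.hadoop.hbase.logging.Log4jUtils;

public final class Log4jUtilsExample {

  private Log4jUtilsExample() {
  }

  public static void main(String[] args) throws Exception {
    // Bump a logger, read back its effective level, then list the active log files.
    Log4jUtils.setLogLevel("org.apache.hadoop.hbase.client", "DEBUG");
    System.out.println(Log4jUtils.getEffectiveLevel("org.apache.hadoop.hbase.client"));
    Set<File> logs = Log4jUtils.getActiveLogFiles();
    logs.forEach(f -> System.out.println(f.getAbsolutePath()));
  }
}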
diff --git a/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java
new file mode 100644
index 0000000..7b3876c
--- /dev/null
+++ b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.log4j;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.Writer;
+
+/**
+ * A copy of the old log4j 1.2 FileAppender. YARN NodeManager's ContainerLogAppender needs this
+ * class, but the log4j-1.2-api bridge does not provide it, which causes the UTs in the
+ * hbase-mapreduce module to fail if we start a separate MR cluster.
+ */
+public class FileAppender extends WriterAppender {
+
+  /**
+   * Controls file truncation. The default value for this variable is <code>true</code>, meaning
+   * that by default a <code>FileAppender</code> will append to an existing file and not truncate
+   * it.
+   * <p>
+   * This option is meaningful only if the FileAppender opens the file.
+   */
+  protected boolean fileAppend = true;
+
+  /**
+   * The name of the log file.
+   */
+  protected String fileName = null;
+
+  /**
+   * Do we do bufferedIO?
+   */
+  protected boolean bufferedIO = false;
+
+  /**
+   * Determines the size of the IO buffer. The default is 8K.
+   */
+  protected int bufferSize = 8 * 1024;
+
+  /**
+   * The default constructor does not do anything.
+   */
+  public FileAppender() {
+  }
+
+  /**
+   * Instantiate a <code>FileAppender</code> and open the file designated by <code>fileName</code>.
+   * The opened filename will become the output destination for this appender.
+   * <p>
+   * If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
+   * designated by <code>fileName</code> will be truncated before being opened.
+   * <p>
+   * If the <code>bufferedIO</code> parameter is <code>true</code>, then buffered IO will be used to
+   * write to the output file.
+   */
+  public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO,
+    int bufferSize) throws IOException {
+    this.layout = layout;
+    this.setFile(fileName, append, bufferedIO, bufferSize);
+  }
+
+  /**
+   * Instantiate a FileAppender and open the file designated by <code>fileName</code>. The opened
+   * filename will become the output destination for this appender.
+   * <p>
+   * If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
+   * designated by <code>fileName</code> will be truncated before being opened.
+   */
+  public FileAppender(Layout layout, String fileName, boolean append) throws IOException {
+    this.layout = layout;
+    this.setFile(fileName, append, false, bufferSize);
+  }
+
+  /**
+   * Instantiate a FileAppender and open the file designated by <code>fileName</code>. The opened
+   * filename will become the output destination for this appender.
+   * <p>
+   * The file will be appended to.
+   */
+  public FileAppender(Layout layout, String fileName) throws IOException {
+    this(layout, fileName, true);
+  }
+
+  /**
+   * The <b>File</b> property takes a string value which should be the name of the file to append
+   * to.
+   * <p>
+   * <font color="#DD0044"><b>Note that the special values "System.out" or "System.err" are no
+   * longer honored.</b></font>
+   * <p>
+   * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+   * options are set.
+   */
+  public void setFile(String file) {
+    // Trim spaces from both ends. The user probably does not want
+    // trailing spaces in file names.
+    String val = file.trim();
+    fileName = val;
+  }
+
+  /**
+   * Returns the value of the <b>Append</b> option.
+   */
+  public boolean getAppend() {
+    return fileAppend;
+  }
+
+  /** Returns the value of the <b>File</b> option. */
+  public String getFile() {
+    return fileName;
+  }
+
+  /**
+   * If the value of <b>File</b> is not <code>null</code>, then {@link #setFile} is called with the
+   * values of <b>File</b> and <b>Append</b> properties.
+   * @since 0.8.1
+   */
+  @Override
+  public void activateOptions() {
+    if (fileName != null) {
+      try {
+        setFile(fileName, fileAppend, bufferedIO, bufferSize);
+      } catch (java.io.IOException e) {
+        errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e,
+          org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE);
+      }
+    }
+  }
+
+  /**
+   * Closes the previously opened file.
+   */
+  protected void closeFile() {
+    if (this.qw != null) {
+      try {
+        this.qw.close();
+      } catch (java.io.IOException e) {
+        if (e instanceof InterruptedIOException) {
+          Thread.currentThread().interrupt();
+        }
+        // Exceptionally, it does not make sense to delegate to an
+        // ErrorHandler, since a closed appender is basically dead.
+      }
+    }
+  }
+
+  /**
+   * Get the value of the <b>BufferedIO</b> option.
+   * <p>
+   * BufferedIO will significantly increase performance on heavily loaded systems.
+   */
+  public boolean getBufferedIO() {
+    return this.bufferedIO;
+  }
+
+  /**
+   * Get the size of the IO buffer.
+   */
+  public int getBufferSize() {
+    return this.bufferSize;
+  }
+
+  /**
+   * The <b>Append</b> option takes a boolean value. It is set to <code>true</code> by default. If
+   * true, then <code>File</code> will be opened in append mode by {@link #setFile setFile} (see
+   * above). Otherwise, {@link #setFile setFile} will open <code>File</code> in truncate mode.
+   * <p>
+   * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+   * options are set.
+   */
+  public void setAppend(boolean flag) {
+    fileAppend = flag;
+  }
+
+  /**
+   * The <b>BufferedIO</b> option takes a boolean value. It is set to <code>false</code> by default.
+   * If true, then <code>File</code> will be opened and the resulting {@link java.io.Writer} wrapped
+   * around a {@link BufferedWriter}. BufferedIO will significantly increase performance on heavily
+   * loaded systems.
+   */
+  public void setBufferedIO(boolean bufferedIO) {
+    this.bufferedIO = bufferedIO;
+    if (bufferedIO) {
+      immediateFlush = false;
+    }
+  }
+
+  /**
+   * Set the size of the IO buffer.
+   */
+  public void setBufferSize(int bufferSize) {
+    this.bufferSize = bufferSize;
+  }
+
+  /**
+   * <p>
+   * Sets and <i>opens</i> the file where the log output will go. The specified file must be
+   * writable.
+   * <p>
+   * If there was already an opened file, then the previous file is closed first.
+   * <p>
+   * <b>Do not use this method directly. To configure a FileAppender or one of its subclasses, set
+   * its properties one by one and then call activateOptions.</b>
+   * @param fileName The path to the log file.
+   * @param append If true will append to fileName. Otherwise will truncate fileName.
+   */
+  public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
+    int bufferSize) throws IOException {
+
+    // It does not make sense to have immediate flush and bufferedIO.
+    if (bufferedIO) {
+      setImmediateFlush(false);
+    }
+
+    reset();
+    FileOutputStream ostream = null;
+    try {
+      //
+      // attempt to create file
+      //
+      ostream = new FileOutputStream(fileName, append);
+    } catch (FileNotFoundException ex) {
+      //
+      // if parent directory does not exist then
+      // attempt to create it and try to create file
+      // see bug 9150
+      //
+      String parentName = new File(fileName).getParent();
+      if (parentName != null) {
+        File parentDir = new File(parentName);
+        if (!parentDir.exists() && parentDir.mkdirs()) {
+          ostream = new FileOutputStream(fileName, append);
+        } else {
+          throw ex;
+        }
+      } else {
+        throw ex;
+      }
+    }
+    Writer fw = createWriter(ostream);
+    if (bufferedIO) {
+      fw = new BufferedWriter(fw, bufferSize);
+    }
+    this.setQWForFiles(fw);
+    this.fileName = fileName;
+    this.fileAppend = append;
+    this.bufferedIO = bufferedIO;
+    this.bufferSize = bufferSize;
+    writeHeader();
+  }
+
+  /**
+   * Sets the quiet writer being used. This method is overridden by {@code RollingFileAppender}.
+   */
+  protected void setQWForFiles(Writer writer) {
+    this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
+  }
+
+  /**
+   * Close any previously opened file and call the parent's <code>reset</code>.
+   */
+  @Override
+  protected void reset() {
+    closeFile();
+    this.fileName = null;
+    super.reset();
+  }
+}
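
Why this test-only copy is needed: YARN's NodeManager ships org.apache.hadoop.yarn.ContainerLogAppender, which subclasses the log4j 1.2 FileAppender by name, so a separate MR cluster started from the tests must find that superclass on the classpath even though HBase itself now logs through log4j2. Roughly, on the Hadoop side (simplified sketch, not code from this commit):

    // hadoop-yarn-common (simplified): the reason org.apache.log4j.FileAppender must resolve.
    public class ContainerLogAppender extends org.apache.log4j.FileAppender {
      // buffers per-container log output and flushes it to the container log file
    }

Because the copy lives under src/test/java, it only affects test classpaths.
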
diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties
deleted file mode 100644
index c322699..0000000
--- a/hbase-logging/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
diff --git a/hbase-logging/src/test/resources/log4j2.xml b/hbase-logging/src/test/resources/log4j2.xml
new file mode 100644
index 0000000..643fae6
--- /dev/null
+++ b/hbase-logging/src/test/resources/log4j2.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<Configuration>
+  <Appenders>
+    <Console name="Console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n" />
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="info">
+      <AppenderRef ref="Console" />
+    </Root>
+    <Logger name="org.apache.hadoop" level="warn" />
+    <Logger name="org.apache.zookeeper" level="error" />
+    <Logger name="org.apache.hadoop.hbase" level="debug" />
+    <!-- These settings are workarounds against spurious logs from the minicluster. See HBASE-4709 -->
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
+    <Logger name="org.apache.hadoop.metrics2.util.MBeans" level="warn" />
+    <Logger name="org.apache.directory" level="warn" additivity="false" />
+    <Logger name="org.apache.hbase.thirdparty.io.netty.channel" level="debug" />
+    <!-- For testing where we want to capture the log message of these special loggers -->
+    <Logger name="org.apache.hadoop.hbase.ipc.FailedServers" level="debug" />
+    <Logger name="org.apache.hadoop.hbase.regionserver.RSRpcServices" level="debug" />
+  </Loggers>
+</Configuration>
\ No newline at end of file
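
The deleted test log4j.properties exposed overrides such as hbase.root.logger and the commented-out ConnectionImplementation=TRACE line. With the XML configuration above, an equivalent ad-hoc override from test code would typically go through log4j2's Configurator; a minimal sketch, assuming log4j-core is on the test classpath (the class name here is illustrative):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.core.config.Configurator;

    public final class EnableConnectionTraceLogging {
      public static void main(String[] args) {
        // Equivalent of the old commented-out property:
        // log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
        Configurator.setLevel("org.apache.hadoop.hbase.client.ConnectionImplementation", Level.TRACE);
      }
    }

HBase test code in the hunks below instead funnels such changes through the Log4jUtils helper, so the concrete logging backend stays behind a single class.
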
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 5bd03f4..65042aa 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -260,13 +260,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 63c1760..7614b83 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -25,24 +25,16 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicReference;
-
 import javax.crypto.spec.SecretKeySpec;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.ZooKeeper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
@@ -52,6 +44,7 @@ import org.apache.hadoop.hbase.io.crypto.Cipher;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@@ -61,6 +54,10 @@ import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@@ -583,7 +580,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   @Override
   protected int doWork() throws IOException {
     if (!isVerbose) {
-        LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
+      Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
     }
     if (numTables > 1) {
       return parallelLoadTables();
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index ec8bb4d..0db0458 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -133,13 +133,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 9c2bcc2..0496cc3 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -141,13 +141,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index 13cfc73..5cf0270 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -130,13 +130,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index f56ca62..7575fd1 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -138,13 +138,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index d583d7a..2cf40cf 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -354,13 +354,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 04df64e..8588357 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -494,13 +494,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 29e8883..a036f93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -639,7 +639,6 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
    * This is used before starting HDFS and map-reduce mini-clusters Run something like the below to
    * check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in
    * the conf.
-   *
    * <pre>
    * Configuration conf = TEST_UTIL.getConfiguration();
    * for (Iterator&lt;Map.Entry&lt;String, String&gt;&gt; i = conf.iterator(); i.hasNext();) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
index 57cfbec..028b8fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
@@ -29,11 +29,10 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately {
 
   private static AsyncConnection CONN;
 
+  private static String LOG_LEVEL;
+
   @BeforeClass
   public static void setUp() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
     UTIL.startMiniCluster(1);
     Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@@ -79,6 +81,9 @@ public class TestAsyncTableBatchRetryImmediately {
 
   @AfterClass
   public static void tearDown() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     CONN.close();
     UTIL.shutdownMiniCluster();
   }
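
The setUp/tearDown changes above establish the pattern repeated in the following tests: read the current level through Log4jUtils, lower it for the noisy section, and restore it afterwards. Condensed into a sketch using the same Log4jUtils methods that appear in the hunk:

    String saved = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
    try {
      // ... run the part of the test that would otherwise flood the output ...
    } finally {
      // restore whatever level was configured before the test touched it
      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), saved);
    }
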
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index eaf1f9d..4818b6d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -33,14 +33,13 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -65,6 +64,7 @@ public class TestMultiRespectsLimits {
       CompatibilityFactory.getInstance(MetricsAssertHelper.class);
   private final static byte[] FAMILY = Bytes.toBytes("D");
   public static final int MAX_SIZE = 100;
+  private static String LOG_LEVEL;
 
   @Rule
   public TestName name = new TestName();
@@ -72,7 +72,8 @@ public class TestMultiRespectsLimits {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
       MAX_SIZE);
 
@@ -82,6 +83,9 @@ public class TestMultiRespectsLimits {
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     TEST_UTIL.shutdownMiniCluster();
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 941d921..a45804a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.After;
@@ -82,10 +83,8 @@ public class TestProtoBufRpc {
     this.conf = HBaseConfiguration.create();
     this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
         rpcServerImpl);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
-      .setLevel(org.apache.log4j.Level.TRACE);
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
     // Create server side implementation
     // Get RPC server for server side implementation
     this.server = RpcServerFactory.createRpcServer(null, "testrpc",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
index 2d66106..1225175 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
@@ -35,17 +35,19 @@ import org.mockito.Mockito;
 public class TestRpcServerTraceLogging {
 
   @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
-      .forClass(TestRpcServerTraceLogging.class);
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
 
-  static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+  private static final org.apache.logging.log4j.core.Logger rpcServerLog =
+    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RpcServer.class);
 
   static final String TRACE_LOG_MSG =
-      "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
-          + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
-      + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
-      + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
-      + "true client_handles_heartbeats: true track_scan_metrics: false";
+    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" +
+      " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " +
+      "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " +
+      "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " +
+      "true client_handles_heartbeats: true track_scan_metrics: false";
 
   static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
 
@@ -62,7 +64,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOff() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -72,7 +74,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOn() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -82,7 +84,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOnLargeMax() {
     conf.setInt("hbase.ipc.trace.log.max.length", 2000);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
index 26de198..75ad7c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
@@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -47,8 +45,9 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
   private HRegionServer rs;
   private RSRpcServices services;
 
-  private Appender appender;
+  private org.apache.logging.log4j.core.Appender appender;
 
   @Parameterized.Parameter
   public static boolean rejectLargeBatchOp;
@@ -90,6 +89,21 @@ public class TestMultiLogThreshold {
     return Arrays.asList(new Object[] { false }, new Object[] { true });
   }
 
+  private final class LevelAndMessage {
+    final org.apache.logging.log4j.Level level;
+
+    final String msg;
+
+    public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
+      this.level = level;
+      this.msg = msg;
+    }
+
+  }
+
+  // log4j2 will reuse the LogEvent, so we need to copy the level and message out.
+  private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
+
   @Before
   public void setupTest() throws Exception {
     util = new HBaseTestingUtility();
@@ -100,13 +114,28 @@ public class TestMultiLogThreshold {
     util.startMiniCluster();
     util.createTable(NAME, TEST_FAM);
     rs = util.getRSForFirstRegionInTable(NAME);
-    appender = mock(Appender.class);
-    LogManager.getLogger(RSRpcServices.class).addAppender(appender);
+    appender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(appender.getName()).thenReturn("mockAppender");
+    when(appender.isStarted()).thenReturn(true);
+    doAnswer(new Answer<Void>() {
+
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        logs.add(
+          new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
+        return null;
+      }
+    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).addAppender(appender);
   }
 
   @After
   public void tearDown() throws Exception {
-    LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).removeAppender(appender);
     util.shutdownMiniCluster();
   }
 
@@ -149,17 +178,16 @@ public class TestMultiLogThreshold {
   }
 
   private void assertLogBatchWarnings(boolean expected) {
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(appender, atLeastOnce()).doAppend(captor.capture());
+    assertFalse(logs.isEmpty());
     boolean actual = false;
-    for (LoggingEvent event : captor.getAllValues()) {
-      if (event.getLevel() == Level.WARN &&
-        event.getRenderedMessage().contains("Large batch operation detected")) {
+    for (LevelAndMessage event : logs) {
+      if (event.level == org.apache.logging.log4j.Level.WARN &&
+        event.msg.contains("Large batch operation detected")) {
         actual = true;
         break;
       }
     }
-    reset(appender);
+    logs.clear();
     assertEquals(expected, actual);
   }
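
A note on the "log4j2 will reuse the LogEvent" comment above: log4j2 may hand an Appender a mutable, reused event instance, so only values copied out of it (as the LevelAndMessage holder does) are safe to inspect after append() returns. If the log4j2 version on the classpath provides LogEvent.toImmutable(), a snapshot of the whole event works as well; a hedged variant of the stubbing in the hunk, assuming a BlockingDeque<org.apache.logging.log4j.core.LogEvent> named events in place of the LevelAndMessage deque:

    doAnswer(invocation -> {
      org.apache.logging.log4j.core.LogEvent event =
        invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
      // toImmutable() copies the reusable event so it can be inspected later
      events.add(event.toImmutable());
      return null;
    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
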
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index da39551..b3c2eb6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.Before;
@@ -56,6 +51,8 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 @Category(LargeTests.class)
 public class TestRegionServerReportForDuty {
 
@@ -91,26 +88,15 @@ public class TestRegionServerReportForDuty {
     testUtil.shutdownMiniDFSCluster();
   }
 
-  /**
-   * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
-   * except that this implementation has a default appender to the root logger.
-   * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
-   * TODO: This class can be removed after we upgrade Hadoop dependency.
-   */
-  static class LogCapturer {
+  private static class LogCapturer {
     private StringWriter sw = new StringWriter();
-    private WriterAppender appender;
-    private org.apache.log4j.Logger logger;
+    private org.apache.logging.log4j.core.appender.WriterAppender appender;
+    private org.apache.logging.log4j.core.Logger logger;
 
-    LogCapturer(org.apache.log4j.Logger logger) {
+    LogCapturer(org.apache.logging.log4j.core.Logger logger) {
       this.logger = logger;
-      Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
-      if (defaultAppender == null) {
-        defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
-      }
-      final Layout layout = (defaultAppender == null) ? new PatternLayout() :
-          defaultAppender.getLayout();
-      this.appender = new WriterAppender(layout, sw);
+      this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
+        .setName("test").setTarget(sw).build();
       this.logger.addAppender(this.appender);
     }
 
@@ -146,7 +132,9 @@ public class TestRegionServerReportForDuty {
     master = cluster.addMaster();
     master.start();
 
-    LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+    LogCapturer capturer =
+      new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+        .getLogger(HRegionServer.class));
     // Set sleep interval relatively low so that exponential backoff is more demanding.
     int msginterval = 100;
     cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 314b966..31b95ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,11 +26,11 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@@ -132,12 +132,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     this.ratio = inRatio;
 
     // Hide lots of logging so the system out is usable as a tab delimited file.
-    org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-
-    org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+    Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
 
 
     Configuration configuration = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index d87fc3e..6d5e81a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.argThat;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.ArgumentMatchers.isA;
 import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Appender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -60,19 +59,14 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
 import org.mockito.ArgumentMatcher;
-import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
-@RunWith(MockitoJUnitRunner.class)
-@Category({LargeTests.class})
+@Category({ LargeTests.class })
 public class TestCanaryTool {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestCanaryTool.class);
+    HBaseClassTestRule.forClass(TestCanaryTool.class);
 
   private HBaseTestingUtility testingUtility;
   private static final byte[] FAMILY = Bytes.toBytes("f");
@@ -81,22 +75,26 @@ public class TestCanaryTool {
   @Rule
   public TestName name = new TestName();
 
+  private org.apache.logging.log4j.core.Appender mockAppender;
+
   @Before
   public void setUp() throws Exception {
     testingUtility = new HBaseTestingUtility();
     testingUtility.startMiniCluster();
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
   }
 
   @After
   public void tearDown() throws Exception {
     testingUtility.shutdownMiniCluster();
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
   }
 
-  @Mock
-  Appender mockAppender;
-
   @Test
   public void testBasicZookeeperCanaryWorks() throws Exception {
     final String[] args = { "-t", "10000", "-zookeeper" };
@@ -105,8 +103,8 @@ public class TestCanaryTool {
 
   @Test
   public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
-    final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError",
-      "-permittedZookeeperFailures", "1" };
+    final String[] args =
+      { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
     testZookeeperCanaryWithArgs(args);
   }
 
@@ -115,7 +113,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -156,7 +154,7 @@ public class TestCanaryTool {
       // the test table has two column family. If readAllCF set true,
       // we expect read count is double of region count
       int expectedReadCount =
-          readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
+        readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
       assertEquals("canary region success count should equal total expected read count",
         expectedReadCount, sink.getReadSuccessCount());
       Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
@@ -184,7 +182,7 @@ public class TestCanaryTool {
     TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -197,23 +195,23 @@ public class TestCanaryTool {
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
     assertTrue("canary should expect to scan at least 1 region",
-        sink.getTotalExpectedRegions() > 0);
+      sink.getTotalExpectedRegions() > 0);
     assertTrue("there should be no read failures", sink.getReadFailureCount() == 0);
     assertTrue("there should be no write failures", sink.getWriteFailureCount() == 0);
     assertTrue("verify read success count > 0", sink.getReadSuccessCount() > 0);
     assertTrue("verify write success count > 0", sink.getWriteSuccessCount() > 0);
     verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+      isA(ColumnFamilyDescriptor.class), anyLong());
     verify(sink, atLeastOnce()).publishWriteTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+      isA(ColumnFamilyDescriptor.class), anyLong());
 
     assertEquals("canary region success count should equal total expected regions",
-        sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions());
+      sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions());
     Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
     assertFalse("verify region map has size > 0", regionMap.isEmpty());
 
     for (String regionName : regionMap.keySet()) {
-      for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) {
+      for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
         assertNotNull("verify getRegionNameAsString()", regionName);
         assertNotNull("verify getRegionInfo()", res.getRegionInfo());
         assertNotNull("verify getTableName()", res.getTableName());
@@ -236,24 +234,25 @@ public class TestCanaryTool {
 
   // Ignore this test. It fails w/ the below on some mac os x.
   // [ERROR] Failures:
-  // [ERROR]   TestCanaryTool.testReadTableTimeouts:216
+  // [ERROR] TestCanaryTool.testReadTableTimeouts:216
   // Argument(s) are different! Wanted:
   // mockAppender.doAppend(
   // <custom argument matcher>
-  //      );
-  //      -> at org.apache.hadoop.hbase.tool.TestCanaryTool
-  //          .testReadTableTimeouts(TestCanaryTool.java:216)
-  //      Actual invocations have different arguments:
-  //      mockAppender.doAppend(
-  //          org.apache.log4j.spi.LoggingEvent@2055cfc1
-  //          );
-  //      )
-  //  )
+  // );
+  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
+  // .testReadTableTimeouts(TestCanaryTool.java:216)
+  // Actual invocations have different arguments:
+  // mockAppender.doAppend(
+  // org.apache.log4j.spi.LoggingEvent@2055cfc1
+  // );
+  // )
+  // )
   //
-  @org.junit.Ignore @Test
+  @org.junit.Ignore
+  @Test
   public void testReadTableTimeouts() throws Exception {
-    final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"),
-      TableName.valueOf(name.getMethodName() + "2")};
+    final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
+      TableName.valueOf(name.getMethodName() + "2") };
     // Create 2 test tables.
     for (int j = 0; j < 2; j++) {
       Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@@ -270,8 +269,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
       tableNames[1].getNameAsString() + "=0";
-    String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
-      name.getMethodName() + "2"};
+    String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
+      name.getMethodName() + "2" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
     for (int i = 0; i < 2; i++) {
@@ -281,18 +280,21 @@ public class TestCanaryTool {
         sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
     }
     // One table's timeout is set for 0 ms and thus, should lead to an error.
-    verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
-      }
-    }));
-    verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Configured read timeout");
-      }
-    }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("exceeded the configured read timeout.");
+        }
+      }));
+    verify(mockAppender, times(2))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
+        }
+      }));
   }
 
   @Test
@@ -300,43 +302,47 @@ public class TestCanaryTool {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
     CanaryTool canary = new CanaryTool(executor, sink);
-    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)};
+    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
     assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
-    verify(mockAppender, times(1)).doAppend(argThat(
-        new ArgumentMatcher<LoggingEvent>() {
-          @Override
-          public boolean matches(LoggingEvent argument) {
-            return argument.getRenderedMessage().contains("Configured write timeout");
-          }
-        }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
+        }
+      }));
   }
 
-  //no table created, so there should be no regions
+  // no table created, so there should be no regions
   @Test
   public void testRegionserverNoRegions() throws Exception {
     runRegionserverCanary();
-    verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender)
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
-  //by creating a table, there shouldn't be any region servers not serving any regions
+  // by creating a table, there shouldn't be any region servers not serving any regions
   @Test
   public void testRegionserverWithRegions() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     testingUtility.createTable(tableName, new byte[][] { FAMILY });
     runRegionserverCanary();
-    verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender, never())
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
   @Test
@@ -344,7 +350,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -358,23 +364,20 @@ public class TestCanaryTool {
       new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
     conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
     assertEquals(0, ToolRunner.run(conf, canary, args));
-    verify(sink, atLeastOnce())
-        .publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+    verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
+      isA(ColumnFamilyDescriptor.class), anyLong());
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void runRegionserverCanary() throws Exception {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
-    String[] args = { "-t", "10000", "-regionserver"};
+    String[] args = { "-t", "10000", "-regionserver" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
-    Integer port =
-      Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
     String hostPort = testingUtility.getZkCluster().getAddress().toString();
     testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort);
     ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@@ -382,8 +385,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
-    String baseZnode = testingUtility.getConfiguration()
-      .get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
+    String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
+      HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
     verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
   }
 }
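
    Note on the TestCanaryTool hunks above: the verifications move from the log4j 1.x
    Appender.doAppend(LoggingEvent) to log4j2's Appender.append(LogEvent), and match on
    LogEvent.getMessage().getFormattedMessage() rather than LoggingEvent.getRenderedMessage().
    A minimal, self-contained sketch of that mocking pattern follows; the class and logger
    names are illustrative, not the exact HBase test setup, and it assumes Mockito 2+ and
    log4j-core on the test classpath.

        import static org.mockito.ArgumentMatchers.argThat;
        import static org.mockito.Mockito.mock;
        import static org.mockito.Mockito.verify;
        import static org.mockito.Mockito.when;

        import org.apache.logging.log4j.LogManager;
        import org.apache.logging.log4j.core.Appender;
        import org.apache.logging.log4j.core.LogEvent;
        import org.apache.logging.log4j.core.Logger;

        public class MockAppenderSketch {
          public static void main(String[] args) {
            // Mock a log4j2 core Appender; it must report a name and a started
            // state or the logger will not deliver events to it.
            Appender mockAppender = mock(Appender.class);
            when(mockAppender.getName()).thenReturn("mockAppender");
            when(mockAppender.isStarted()).thenReturn(true);

            // Appenders attach to the log4j2 core Logger implementation, so the
            // API-level Logger is cast to org.apache.logging.log4j.core.Logger.
            Logger logger = (Logger) LogManager.getLogger("illustrative.logger");
            logger.addAppender(mockAppender);
            try {
              logger.error("Configured read timeout exceeded");
              // Assertions now inspect the formatted message of the LogEvent.
              verify(mockAppender).append(argThat((LogEvent event) ->
                event.getMessage().getFormattedMessage().contains("read timeout")));
            } finally {
              logger.removeAppender(mockAppender);
            }
          }
        }
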
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index bd9b3f4..e61e163 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -46,12 +46,10 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-shaded-mapreduce</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <!-- parent pom defines these for children. :( :( :( -->
     <dependency>
@@ -60,8 +58,18 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>provided</scope>
     </dependency>
     <!-- Test dependencies -->
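
    Note on the dependency change above: the single provided log4j:log4j artifact is replaced
    by three log4j2 artifacts -- log4j-api (the logging interface), log4j-core (the
    implementation), and log4j-slf4j-impl (the SLF4J binding). Application code keeps logging
    through SLF4J; a minimal sketch of that routing is below, with an illustrative class name
    that is not part of the HBase codebase.

        import org.slf4j.Logger;
        import org.slf4j.LoggerFactory;

        public class Slf4jRoutingSketch {
          // Code logs against the SLF4J API; with log4j-slf4j-impl on the
          // classpath the call is routed to the log4j2 backend supplied by
          // log4j-api and log4j-core.
          private static final Logger LOG = LoggerFactory.getLogger(Slf4jRoutingSketch.class);

          public static void main(String[] args) {
            LOG.info("Rendered by log4j2 via the SLF4J binding");
          }
        }
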
diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
index 090e068..18aaf71 100644
--- a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
+++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
@@ -1,118 +1,120 @@
 <project xmlns="https://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-      /**
-       * Licensed to the Apache Software Foundation (ASF) under one
-       * or more contributor license agreements.  See the NOTICE file
-       * distributed with this work for additional information
-       * regarding copyright ownership.  The ASF licenses this file
-       * to you under the Apache License, Version 2.0 (the
-       * "License"); you may not use this file except in compliance
-       * with the License.  You may obtain a copy of the License at
-       *
-       *     http://www.apache.org/licenses/LICENSE-2.0
-       *
-       * Unless required by applicable law or agreed to in writing, software
-       * distributed under the License is distributed on an "AS IS" BASIS,
-       * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-       * See the License for the specific language governing permissions and
-       * limitations under the License.
-       */
-      -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-shaded</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
-    <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
-    <name>Apache HBase - Shaded - Client</name>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencies>
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<!--
+  /**
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+-->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-shaded</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+  <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
+  <name>Apache HBase - Shaded - Client</name>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- These hadoop profiles should be derived from those in the hbase-client
+         module. Essentially, you must list the same hadoop-* dependencies
+         so provided dependencies will not be transitively included.
+    -->
+    <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
         <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+          <scope>provided</scope>
         </dependency>
-    </dependencies>
-
-    <profiles>
-      <!-- These hadoop profiles should be derived from those in the hbase-client
-           module. Essentially, you must list the same hadoop-* dependencies
-           so provided dependencies will not be transitively included.
-      -->
-      <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
-      <profile>
-        <id>hadoop-3.0</id>
-        <activation>
-          <property><name>!hadoop.profile</name></property>
-        </activation>
-        <dependencies>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-auth</artifactId>
-            <scope>provided</scope>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.jackson</groupId>
-            <artifactId>jackson-jaxrs</artifactId>
-            <version>1.9.13</version>
-            <scope>provided</scope>
-            <exclusions>
-              <exclusion>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-mapper-asl</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-core-asl</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.jackson</groupId>
-            <artifactId>jackson-xc</artifactId>
-            <version>1.9.13</version>
-            <scope>provided</scope>
-            <exclusions>
-              <exclusion>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-mapper-asl</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-core-asl</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-        </dependencies>
-      </profile>
-    </profiles>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+          <version>1.9.13</version>
+          <scope>provided</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-mapper-asl</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-core-asl</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+          <version>1.9.13</version>
+          <scope>provided</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-mapper-asl</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-core-asl</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml
index 8cfc3f2..4c6ff0e 100644
--- a/hbase-shaded/hbase-shaded-client/pom.xml
+++ b/hbase-shaded/hbase-shaded-client/pom.xml
@@ -1,6 +1,6 @@
 <project xmlns="https://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <!--
       /**
        * Licensed to the Apache Software Foundation (ASF) under one
@@ -20,77 +20,76 @@
        * limitations under the License.
        */
       -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-shaded</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
-    <artifactId>hbase-shaded-client</artifactId>
-    <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>aggregate-into-a-jar-with-relocated-third-parties</id>
-                        <configuration>
-                            <artifactSet>
-                                <excludes>
-                                    <!-- exclude J2EE modules that come in for JDK11+ (since
-                                         hadoop-3.2.0) or modules that come in for JDK8+ but
-                                         need not be included -->
-                                    <exclude>javax.annotation:javax.annotation-api</exclude>
-                                    <exclude>javax.activation:javax.activation-api</exclude>
-                                    <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
-                                    <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
-                                    <exclude>jakarta.validation:jakarta.validation-api</exclude>
-                                    <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
-                                    <!--
-                                      Tell the shade plugin that in this case we want to include hadoop
-                                      by leaving out the exclude.
-                                      -->
-                                    <!-- The rest of these should be kept in sync with the parent pom -->
-                                    <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
-                                    <exclude>org.slf4j:*</exclude>
-                                    <exclude>com.google.code.findbugs:*</exclude>
-                                    <exclude>com.github.stephenc.findbugs:*</exclude>
-                                    <exclude>com.github.spotbugs:*</exclude>
-                                    <exclude>org.apache.htrace:*</exclude>
-                                    <exclude>org.apache.yetus:*</exclude>
-                                    <exclude>log4j:*</exclude>
-                                    <exclude>commons-logging:*</exclude>
-                                    <exclude>org.javassist:*</exclude>
-                                </excludes>
-                            </artifactSet>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-        </dependency>
-    </dependencies>
-
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-shaded</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+  <artifactId>hbase-shaded-client</artifactId>
+  <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>aggregate-into-a-jar-with-relocated-third-parties</id>
+            <configuration>
+              <artifactSet>
+                <excludes>
+                  <!-- exclude J2EE modules that come in for JDK11+ (since
+                       hadoop-3.2.0) or modules that come in for JDK8+ but
+                       need not be included -->
+                  <exclude>javax.annotation:javax.annotation-api</exclude>
+                  <exclude>javax.activation:javax.activation-api</exclude>
+                  <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
+                  <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
+                  <exclude>jakarta.validation:jakarta.validation-api</exclude>
+                  <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
+                  <!--
+                    Tell the shade plugin that in this case we want to include hadoop
+                    by leaving out the exclude.
+                  -->
+                  <!-- The rest of these should be kept in sync with the parent pom -->
+                  <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
+                  <exclude>org.slf4j:*</exclude>
+                  <exclude>com.google.code.findbugs:*</exclude>
+                  <exclude>com.github.stephenc.findbugs:*</exclude>
+                  <exclude>com.github.spotbugs:*</exclude>
+                  <exclude>org.apache.htrace:*</exclude>
+                  <exclude>org.apache.yetus:*</exclude>
+                  <exclude>org.apache.logging.log4j:*</exclude>
+                  <exclude>commons-logging:*</exclude>
+                  <exclude>org.javassist:*</exclude>
+                </excludes>
+              </artifactSet>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
index 4fe7fe8..fb48adb 100644
--- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -1,6 +1,6 @@
 <project xmlns="https://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <!--
       /**
        * Licensed to the Apache Software Foundation (ASF) under one
@@ -20,236 +20,238 @@
        * limitations under the License.
        */
       -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-shaded</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
-    <artifactId>hbase-shaded-mapreduce</artifactId>
-    <name>Apache HBase - Shaded - MapReduce</name>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <!--Include the Driver class as the 'main'.
-                                 Executing the jar will then show a list of the basic MR jobs.
-                                 -->
-                            <mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass>
-                        </manifest>
-                    </archive>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencies>
-        <!--
-             We want to ensure needed hadoop bits are at provided scope for our shaded
-             artifact, so we list them below in hadoop specific profiles.
-          -->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-shaded</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+  <artifactId>hbase-shaded-mapreduce</artifactId>
+  <name>Apache HBase - Shaded - MapReduce</name>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <!-- Include the Driver class as the 'main'.
+                   Executing the jar will then show a list of the basic MR jobs.
+              -->
+              <mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass>
+            </manifest>
+          </archive>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <!--
+      We want to ensure needed hadoop bits are at provided scope for our shaded
+      artifact, so we list them below in hadoop specific profiles.
+    -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-mapreduce</artifactId>
+      <exclusions>
+        <!-- Jaxb-api is a part of Java SE now -->
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+        <!-- Jersey not used by our MR support -->
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>javax.ws.rs-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey.contribs</groupId>
+          <artifactId>jersey-guice</artifactId>
+        </exclusion>
+        <!-- Jetty not used by our MR support -->
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>javax.servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-http</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-security</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-servlet</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util-ajax</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish</groupId>
+          <artifactId>javax.el</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-webapp</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish.jersey.core</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish.jersey.containers</groupId>
+          <artifactId>jersey-container-servlet-core</artifactId>
+        </exclusion>
+        <!-- We excluded the server-side generated classes for JSP, so exclude
+             their runtime support libraries too
+        -->
+        <exclusion>
+          <groupId>org.glassfish.web</groupId>
+          <artifactId>javax.servlet.jsp</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>javax.servlet.jsp-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- These hadoop profiles should be derived from those in the hbase-mapreduce
+         module. Essentially, you must list the same hadoop-* dependencies
+         since provided dependencies are not transitively included.
+    -->
+    <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.version>${hadoop-three.version}</hadoop.version>
+      </properties>
+      <dependencies>
         <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-mapreduce</artifactId>
-            <exclusions>
-              <!-- Jaxb-api is a part of Java SE now -->
-              <exclusion>
-                <groupId>javax.xml.bind</groupId>
-                <artifactId>jaxb-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>javax.ws.rs</groupId>
-                <artifactId>jsr311-api</artifactId>
-              </exclusion>
-              <!-- Jersey not used by our MR support -->
-              <exclusion>
-                <groupId>javax.ws.rs</groupId>
-                <artifactId>javax.ws.rs-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey</groupId>
-                <artifactId>jersey-server</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey</groupId>
-                <artifactId>jersey-client</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey</groupId>
-                <artifactId>jersey-core</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey</groupId>
-                <artifactId>jersey-json</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey.contribs</groupId>
-                <artifactId>jersey-guice</artifactId>
-              </exclusion>
-              <!-- Jetty not used by our MR support -->
-              <exclusion>
-                <groupId>javax.servlet</groupId>
-                <artifactId>javax.servlet-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-http</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-security</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-server</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-servlet</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-util</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-util-ajax</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.glassfish</groupId>
-                <artifactId>javax.el</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-webapp</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.glassfish.jersey.core</groupId>
-                <artifactId>jersey-server</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.glassfish.jersey.containers</groupId>
-                <artifactId>jersey-container-servlet-core</artifactId>
-              </exclusion>
-              <!-- We excluded the server-side generated classes for JSP, so exclude
-                   their runtime support libraries too
-                -->
-              <exclusion>
-                <groupId>org.glassfish.web</groupId>
-                <artifactId>javax.servlet.jsp</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>javax.servlet.jsp</groupId>
-                <artifactId>javax.servlet.jsp-api</artifactId>
-              </exclusion>
-            </exclusions>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <scope>provided</scope>
         </dependency>
-    </dependencies>
-
-    <profiles>
-        <!-- These hadoop profiles should be derived from those in the hbase-mapreduce
-             module. Essentially, you must list the same hadoop-* dependencies
-             since provided dependencies are not transitively included.
-        -->
-        <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
-        <profile>
-          <id>hadoop-3.0</id>
-          <activation>
-            <property><name>!hadoop.profile</name></property>
-          </activation>
-          <properties>
-            <hadoop.version>${hadoop-three.version}</hadoop.version>
-          </properties>
-          <dependencies>
-            <dependency>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-common</artifactId>
-              <scope>provided</scope>
-            </dependency>
-            <dependency>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-hdfs</artifactId>
-              <scope>provided</scope>
-            </dependency>
-            <dependency>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-auth</artifactId>
-              <scope>provided</scope>
-            </dependency>
-            <dependency>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-mapreduce-client-core</artifactId>
-              <scope>provided</scope>
-              <exclusions>
-                <exclusion>
-                  <groupId>com.google.guava</groupId>
-                  <artifactId>guava</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>javax.xml.bind</groupId>
-                  <artifactId>jaxb-api</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>javax.ws.rs</groupId>
-                  <artifactId>jsr311-api</artifactId>
-                </exclusion>
-              </exclusions>
-            </dependency>
-            <dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <scope>provided</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.ws.rs</groupId>
+              <artifactId>jsr311-api</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+          <version>1.9.13</version>
+          <scope>provided</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-mapper-asl</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>org.codehaus.jackson</groupId>
-              <artifactId>jackson-jaxrs</artifactId>
-              <version>1.9.13</version>
-              <scope>provided</scope>
-              <exclusions>
-                <exclusion>
-                  <groupId>org.codehaus.jackson</groupId>
-                  <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>org.codehaus.jackson</groupId>
-                  <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-              </exclusions>
-            </dependency>
-            <dependency>
+              <artifactId>jackson-core-asl</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+          <version>1.9.13</version>
+          <scope>provided</scope>
+          <exclusions>
+            <exclusion>
               <groupId>org.codehaus.jackson</groupId>
-              <artifactId>jackson-xc</artifactId>
-              <version>1.9.13</version>
-              <scope>provided</scope>
-              <exclusions>
-                <exclusion>
-                  <groupId>org.codehaus.jackson</groupId>
-                  <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>org.codehaus.jackson</groupId>
-                  <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-              </exclusions>
-            </dependency>
-          </dependencies>
-        </profile>
-    </profiles>
+              <artifactId>jackson-mapper-asl</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.codehaus.jackson</groupId>
+              <artifactId>jackson-core-asl</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
index 4750c9c..e71a067 100644
--- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
@@ -1,68 +1,86 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-      /**
-       * Licensed to the Apache Software Foundation (ASF) under one
-       * or more contributor license agreements.  See the NOTICE file
-       * distributed with this work for additional information
-       * regarding copyright ownership.  The ASF licenses this file
-       * to you under the Apache License, Version 2.0 (the
-       * "License"); you may not use this file except in compliance
-       * with the License.  You may obtain a copy of the License at
-       *
-       *     http://www.apache.org/licenses/LICENSE-2.0
-       *
-       * Unless required by applicable law or agreed to in writing, software
-       * distributed under the License is distributed on an "AS IS" BASIS,
-       * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-       * See the License for the specific language governing permissions and
-       * limitations under the License.
-       */
-      -->
-    <modelVersion>4.0.0</modelVersion>
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<!--
+  /**
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+-->
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-build-configuration</artifactId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>../../hbase-build-configuration</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.hbase</groupId>
+    <artifactId>hbase-build-configuration</artifactId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>../../hbase-build-configuration</relativePath>
+  </parent>
 
-    <artifactId>hbase-shaded-testing-util-tester</artifactId>
-    <name>Apache HBase - Shaded - Testing Util Tester</name>
-    <description>Ensures that hbase-shaded-testing-util works with hbase-shaded-client.</description>
+  <artifactId>hbase-shaded-testing-util-tester</artifactId>
+  <name>Apache HBase - Shaded - Testing Util Tester</name>
+  <description>Ensures that hbase-shaded-testing-util works with hbase-shaded-client.</description>
 
-    <dependencies>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-shaded-client</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-shaded-testing-util</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.codehaus.jackson</groupId>
-            <artifactId>jackson-mapper-asl</artifactId>
-            <version>1.9.13</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-logging</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-testing-util</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>1.9.13</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 
 </project>
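
    Note on the tester module above: the old slf4j-log4j12 binding is dropped in favour of
    log4j-slf4j-impl, and log4j-1.2-api is added at test scope. The latter bridges calls made
    against the legacy log4j 1.x API (presumably from third-party test dependencies) to
    log4j2, so the old log4j jar is no longer needed. A minimal sketch, with an illustrative
    class name:

        import org.apache.log4j.Logger;

        public class Log4j1BridgeSketch {
          public static void main(String[] args) {
            // Calls against the legacy org.apache.log4j API are satisfied by the
            // log4j-1.2-api bridge and rendered by the log4j2 backend.
            Logger legacy = Logger.getLogger(Log4j1BridgeSketch.class);
            legacy.info("Bridged from the log4j 1.x API to log4j2");
          }
        }
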
diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml
index dbdec08..a83b515 100644
--- a/hbase-shaded/hbase-shaded-testing-util/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml
@@ -1,234 +1,203 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-      /**
-       * Licensed to the Apache Software Foundation (ASF) under one
-       * or more contributor license agreements.  See the NOTICE file
-       * distributed with this work for additional information
-       * regarding copyright ownership.  The ASF licenses this file
-       * to you under the Apache License, Version 2.0 (the
-       * "License"); you may not use this file except in compliance
-       * with the License.  You may obtain a copy of the License at
-       *
-       *     http://www.apache.org/licenses/LICENSE-2.0
-       *
-       * Unless required by applicable law or agreed to in writing, software
-       * distributed under the License is distributed on an "AS IS" BASIS,
-       * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-       * See the License for the specific language governing permissions and
-       * limitations under the License.
-       */
-      -->
-    <modelVersion>4.0.0</modelVersion>
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<!--
+  /**
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+-->
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <artifactId>hbase-shaded</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
+  <parent>
+    <artifactId>hbase-shaded</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
 
-    <artifactId>hbase-shaded-testing-util</artifactId>
-    <name>Apache HBase - Shaded - Testing Util</name>
-
-    <dependencies>
-        <!-- test-jar dependencies -->
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop.version}</version>
-            <type>test-jar</type>
-            <scope>compile</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>javax.servlet.jsp</groupId>
-                    <artifactId>jsp-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-jaxrs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-xc</artifactId>
-                </exclusion>
-                <exclusion>
-                   <groupId>javax.xml.bind</groupId>
-                   <artifactId>jaxb-api</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>javax.ws.rs</groupId>
-                  <artifactId>jsr311-api</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-app</artifactId>
-            <version>${hadoop.version}</version>
-            <type>test-jar</type>
-            <scope>compile</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-jaxrs</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-xc</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.xml.bind</groupId>
-                    <artifactId>jaxb-api</artifactId>
-                </exclusion>
-                <exclusion>
-                  <groupId>javax.ws.rs</groupId>
-                  <artifactId>jsr311-api</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-            <version>${hadoop.version}</version>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-            <exclusions>
-              <exclusion>
-                 <groupId>javax.xml.bind</groupId>
-                 <artifactId>jaxb-api</artifactId>
-              </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-asyncfs</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-zookeeper</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.codehaus.jackson</groupId>
-            <artifactId>jackson-jaxrs</artifactId>
-            <version>1.9.13</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-testing-util</artifactId>
-            <version>${project.version}</version>
-            <scope>compile</scope>
-            <exclusions>
-              <exclusion>
-                <groupId>javax.xml.bind</groupId>
-                <artifactId>jaxb-api</artifactId>
-              </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>aggregate-into-a-jar-with-relocated-third-parties</id>
-                        <configuration>
-                            <artifactSet>
-                                <excludes>
-                                    <!-- exclude J2EE modules that come in for JDK11+ (since
-                                         hadoop-3.2.0) or modules that come in for JDK8+ but
-                                         need not be included -->
-                                    <exclude>javax.annotation:javax.annotation-api</exclude>
-                                    <exclude>javax.activation:javax.activation-api</exclude>
-                                    <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
-                                    <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
-                                    <exclude>jakarta.validation:jakarta.validation-api</exclude>
-                                    <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
-                                    <!--
-                                      Tell the shade plugin that in this case we want to include hadoop
-                                      by leaving out the exclude.
-                                      -->
-                                    <!-- The rest of these should be kept in sync with the parent pom -->
-                                    <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
-                                    <exclude>org.slf4j:*</exclude>
-                                    <exclude>com.google.code.findbugs:*</exclude>
-                                    <exclude>com.github.stephenc.findbugs:*</exclude>
-                                    <exclude>com.github.spotbugs:*</exclude>
-                                    <exclude>org.apache.htrace:*</exclude>
-                                    <exclude>org.apache.yetus:*</exclude>
-                                    <exclude>log4j:*</exclude>
-                                    <exclude>commons-logging:*</exclude>
-                                    <exclude>org.javassist:*</exclude>
-                                </excludes>
-                            </artifactSet>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
+  <artifactId>hbase-shaded-testing-util</artifactId>
+  <name>Apache HBase - Shaded - Testing Util</name>
 
+  <dependencies>
+    <!-- test-jar dependencies -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <type>test-jar</type>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-asyncfs</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-jaxrs</artifactId>
+      <version>1.9.13</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>aggregate-into-a-jar-with-relocated-third-parties</id>
+            <configuration>
+              <artifactSet>
+                <excludes>
+                  <!-- exclude J2EE modules that come in for JDK11+ (since
+                       hadoop-3.2.0) or modules that come in for JDK8+ but
+                       need not be included -->
+                  <exclude>javax.annotation:javax.annotation-api</exclude>
+                  <exclude>javax.activation:javax.activation-api</exclude>
+                  <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
+                  <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
+                  <exclude>jakarta.validation:jakarta.validation-api</exclude>
+                  <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
+                  <!--
+                    Tell the shade plugin that in this case we want to include hadoop
+                    by leaving out the exclude.
+                  -->
+                  <!-- The rest of these should be kept in sync with the parent pom -->
+                  <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
+                  <exclude>org.slf4j:*</exclude>
+                  <exclude>com.google.code.findbugs:*</exclude>
+                  <exclude>com.github.stephenc.findbugs:*</exclude>
+                  <exclude>com.github.spotbugs:*</exclude>
+                  <exclude>org.apache.htrace:*</exclude>
+                  <exclude>org.apache.yetus:*</exclude>
+                  <exclude>org.apache.logging.log4j:*</exclude>
+                  <exclude>commons-logging:*</exclude>
+                  <exclude>org.javassist:*</exclude>
+                </excludes>
+              </artifactSet>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 </project>
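
Note that the shade configuration above now excludes org.apache.logging.log4j:* (where it previously excluded log4j:*) as well as org.slf4j:*, so the relocated testing-util jar ships without any logging backend. A downstream build that tests against it therefore has to supply the Log4j2 binding itself. A minimal sketch of such a consumer POM fragment, assuming a log4j2.version property managed elsewhere in that build (the property name is illustrative, not part of this commit):

    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-shaded-testing-util</artifactId>
      <version>3.0.0-SNAPSHOT</version>
      <scope>test</scope>
    </dependency>
    <!-- the shaded jar deliberately leaves out SLF4J and the logging backend, so bring your own -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <version>${log4j2.version}</version><!-- assumed property; use whatever version your build pins -->
      <scope>test</scope>
    </dependency>
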
diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
index 2d05ee2..1ebdf59 100644
--- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
@@ -40,7 +40,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-shaded-client</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <!-- parent pom defines these for children. :( :( :( -->
     <dependency>
@@ -49,8 +48,18 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>provided</scope>
     </dependency>
     <!-- Test dependencies -->
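
The hunk above shows the coordinate swap that repeats throughout this commit: the single provided log4j:log4j dependency becomes the log4j-api/log4j-core pair plus the log4j-slf4j-impl bridge, which also takes over from org.slf4j:slf4j-log4j12 as the SLF4J binding (see the hbase-shaded/pom.xml change further down). A hedged sketch of the equivalent dependencyManagement entries in a consuming parent POM, with log4j2.version standing in for whichever Log4j2 release that build standardizes on (the property name is an assumption, not taken from this commit):

    <dependencyManagement>
      <dependencies>
        <!-- replaces log4j:log4j -->
        <dependency>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-api</artifactId>
          <version>${log4j2.version}</version>
        </dependency>
        <dependency>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-core</artifactId>
          <version>${log4j2.version}</version>
        </dependency>
        <!-- replaces org.slf4j:slf4j-log4j12 as the SLF4J binding -->
        <dependency>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-slf4j-impl</artifactId>
          <version>${log4j2.version}</version>
        </dependency>
      </dependencies>
    </dependencyManagement>
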
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 88832ba..7c78d18 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <!--
       /**
        * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,544 +21,544 @@
        * limitations under the License.
        */
       -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-build-configuration</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>../hbase-build-configuration</relativePath>
-    </parent>
-    <artifactId>hbase-shaded</artifactId>
-    <name>Apache HBase - Shaded</name>
-    <description>Module of HBase with most deps shaded.</description>
-    <packaging>pom</packaging>
-    <properties>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-build-configuration</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
+  <artifactId>hbase-shaded</artifactId>
+  <name>Apache HBase - Shaded</name>
+  <description>Module of HBase with most deps shaded.</description>
+  <packaging>pom</packaging>
+  <properties>
       <!-- Don't make a test-jar -->
-      <maven.test.skip>true</maven.test.skip>
+    <maven.test.skip>true</maven.test.skip>
       <!-- Don't make a source-jar -->
-      <source.skip>true</source.skip>
-      <license.bundles.dependencies>true</license.bundles.dependencies>
-      <shaded.prefix>org.apache.hadoop.hbase.shaded</shaded.prefix>
-    </properties>
-    <modules>
-        <module>hbase-shaded-client-byo-hadoop</module>
-        <module>hbase-shaded-client</module>
-        <module>hbase-shaded-mapreduce</module>
-        <module>hbase-shaded-testing-util</module>
-        <module>hbase-shaded-testing-util-tester</module>
-        <module>hbase-shaded-check-invariants</module>
-        <module>hbase-shaded-with-hadoop-check-invariants</module>
-    </modules>
-    <dependencies>
-      <dependency>
-         <groupId>org.apache.hbase</groupId>
-         <artifactId>hbase-resource-bundle</artifactId>
-         <optional>true</optional>
-      </dependency>
-      <!-- put the log implementations to optional -->
-      <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <optional>true</optional>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <optional>true</optional>
-      </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <!-- licensing info from our dependencies -->
-            <plugin>
-              <groupId>org.apache.maven.plugins</groupId>
-              <artifactId>maven-remote-resources-plugin</artifactId>
-              <executions>
-                <execution>
-                  <id>aggregate-licenses</id>
-                  <goals>
-                    <goal>process</goal>
-                  </goals>
-                  <configuration>
-                    <properties>
-                      <copyright-end-year>${build.year}</copyright-end-year>
-                      <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
-                      <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
-                      <bundled-jquery>${license.bundles.jquery}</bundled-jquery>
-                      <bundled-logo>${license.bundles.logo}</bundled-logo>
-                      <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
-                    </properties>
-                    <resourceBundles>
-                      <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
-                    </resourceBundles>
-                    <supplementalModelArtifacts>
-                      <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
-                    </supplementalModelArtifacts>
-                    <supplementalModels>
-                      <supplementalModel>supplemental-models.xml</supplementalModel>
-                    </supplementalModels>
-                  </configuration>
-                </execution>
-              </executions>
-            </plugin>
-        </plugins>
-        <pluginManagement>
-            <plugins>
-                <plugin>
-                    <!--Make it so assembly:single does nothing in here-->
-                    <artifactId>maven-assembly-plugin</artifactId>
-                    <configuration>
-                        <skipAssembly>true</skipAssembly>
-                    </configuration>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-shade-plugin</artifactId>
-                    <version>3.2.4</version>
-                    <executions>
-                        <execution>
-                            <id>aggregate-into-a-jar-with-relocated-third-parties</id>
-                            <phase>package</phase>
-                            <goals>
-                                <goal>shade</goal>
-                            </goals>
-                            <configuration>
-                                <createSourcesJar>false</createSourcesJar>
-                                <shadedArtifactAttached>false</shadedArtifactAttached>
-                                <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
-                                <shadeTestJar>false</shadeTestJar>
-                                <artifactSet>
-                                    <excludes>
-                                        <!-- exclude J2EE modules that come in for JDK11+ (since
-                                             hadoop-3.2.0) or modules that come in for JDK8+ but
-                                             need not be included -->
-                                        <exclude>javax.annotation:javax.annotation-api</exclude>
-                                        <exclude>javax.activation:javax.activation-api</exclude>
-                                        <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
-                                        <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
-                                        <exclude>jakarta.validation:jakarta.validation-api</exclude>
-                                        <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
-                                        <!-- default to excluding Hadoop, have module that want
-                                             to include it redefine the exclude list -->
-                                        <exclude>org.apache.hadoop:*</exclude>
-                                        <!-- the rest of this needs to be kept in sync with any
-                                             hadoop-including module -->
-                                        <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
-                                        <exclude>org.slf4j:*</exclude>
-                                        <exclude>com.google.code.findbugs:*</exclude>
-                                        <exclude>com.github.stephenc.findbugs:*</exclude>
-                                        <exclude>com.github.spotbugs:*</exclude>
-                                        <exclude>org.apache.htrace:*</exclude>
-                                        <exclude>org.apache.yetus:*</exclude>
-                                        <exclude>log4j:*</exclude>
-                                        <exclude>commons-logging:*</exclude>
-                                        <exclude>org.javassist:*</exclude>
-                                    </excludes>
-                                </artifactSet>
-                                <relocations>
-                                    <!-- top level com not including sun-->
-                                    <relocation>
-                                        <pattern>com.cedarsoftware</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.cedarsoftware</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.codahale</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.codahale</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.ctc</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.ctc</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.dropwizard</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.dropwizard</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.fasterxml</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.fasterxml</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.github.benmanes.caffeine</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.github.benmanes.caffeine</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.google</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.jamesmurty</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.jamesmurty</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.jcraft</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.jcraft</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.lmax</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.lmax</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.microsoft</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.microsoft</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.nimbusds</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.nimbusds</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.squareup</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.squareup</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.thoughtworks</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.thoughtworks</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>com.zaxxer</pattern>
-                                        <shadedPattern>${shaded.prefix}.com.zaxxer</shadedPattern>
-                                    </relocation>
+    <source.skip>true</source.skip>
+    <license.bundles.dependencies>true</license.bundles.dependencies>
+    <shaded.prefix>org.apache.hadoop.hbase.shaded</shaded.prefix>
+  </properties>
+  <modules>
+    <module>hbase-shaded-client-byo-hadoop</module>
+    <module>hbase-shaded-client</module>
+    <module>hbase-shaded-mapreduce</module>
+    <module>hbase-shaded-testing-util</module>
+    <module>hbase-shaded-testing-util-tester</module>
+    <module>hbase-shaded-check-invariants</module>
+    <module>hbase-shaded-with-hadoop-check-invariants</module>
+  </modules>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-resource-bundle</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <!-- put the log implementations to optional -->
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <!-- licensing info from our dependencies -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-remote-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>aggregate-licenses</id>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <properties>
+                <copyright-end-year>${build.year}</copyright-end-year>
+                <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
+                <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
+                <bundled-jquery>${license.bundles.jquery}</bundled-jquery>
+                <bundled-logo>${license.bundles.logo}</bundled-logo>
+                <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
+              </properties>
+              <resourceBundles>
+                <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
+              </resourceBundles>
+              <supplementalModelArtifacts>
+                <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
+              </supplementalModelArtifacts>
+              <supplementalModels>
+                <supplementalModel>supplemental-models.xml</supplementalModel>
+              </supplementalModels>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <!--Make it so assembly:single does nothing in here-->
+          <artifactId>maven-assembly-plugin</artifactId>
+          <configuration>
+            <skipAssembly>true</skipAssembly>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>3.2.4</version>
+          <executions>
+            <execution>
+              <id>aggregate-into-a-jar-with-relocated-third-parties</id>
+              <phase>package</phase>
+              <goals>
+                <goal>shade</goal>
+              </goals>
+              <configuration>
+                <createSourcesJar>false</createSourcesJar>
+                <shadedArtifactAttached>false</shadedArtifactAttached>
+                <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+                <shadeTestJar>false</shadeTestJar>
+                <artifactSet>
+                  <excludes>
+                    <!-- exclude J2EE modules that come in for JDK11+ (since
+                         hadoop-3.2.0) or modules that come in for JDK8+ but
+                         need not be included -->
+                    <exclude>javax.annotation:javax.annotation-api</exclude>
+                    <exclude>javax.activation:javax.activation-api</exclude>
+                    <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
+                    <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
+                    <exclude>jakarta.validation:jakarta.validation-api</exclude>
+                    <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
+                    <!-- default to excluding Hadoop, have module that want
+                         to include it redefine the exclude list -->
+                    <exclude>org.apache.hadoop:*</exclude>
+                    <!-- the rest of this needs to be kept in sync with any
+                         hadoop-including module -->
+                    <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
+                    <exclude>org.slf4j:*</exclude>
+                    <exclude>com.google.code.findbugs:*</exclude>
+                    <exclude>com.github.stephenc.findbugs:*</exclude>
+                    <exclude>com.github.spotbugs:*</exclude>
+                    <exclude>org.apache.htrace:*</exclude>
+                    <exclude>org.apache.yetus:*</exclude>
+                    <exclude>org.apache.logging.log4j:*</exclude>
+                    <exclude>commons-logging:*</exclude>
+                    <exclude>org.javassist:*</exclude>
+                  </excludes>
+                </artifactSet>
+                <relocations>
+                  <!-- top level com not including sun-->
+                  <relocation>
+                    <pattern>com.cedarsoftware</pattern>
+                    <shadedPattern>${shaded.prefix}.com.cedarsoftware</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.codahale</pattern>
+                    <shadedPattern>${shaded.prefix}.com.codahale</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.ctc</pattern>
+                    <shadedPattern>${shaded.prefix}.com.ctc</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.dropwizard</pattern>
+                    <shadedPattern>${shaded.prefix}.com.dropwizard</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.fasterxml</pattern>
+                    <shadedPattern>${shaded.prefix}.com.fasterxml</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.github.benmanes.caffeine</pattern>
+                    <shadedPattern>${shaded.prefix}.com.github.benmanes.caffeine</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.google</pattern>
+                    <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.jamesmurty</pattern>
+                    <shadedPattern>${shaded.prefix}.com.jamesmurty</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.jcraft</pattern>
+                    <shadedPattern>${shaded.prefix}.com.jcraft</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.lmax</pattern>
+                    <shadedPattern>${shaded.prefix}.com.lmax</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.microsoft</pattern>
+                    <shadedPattern>${shaded.prefix}.com.microsoft</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.nimbusds</pattern>
+                    <shadedPattern>${shaded.prefix}.com.nimbusds</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.squareup</pattern>
+                    <shadedPattern>${shaded.prefix}.com.squareup</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.thoughtworks</pattern>
+                    <shadedPattern>${shaded.prefix}.com.thoughtworks</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.zaxxer</pattern>
+                    <shadedPattern>${shaded.prefix}.com.zaxxer</shadedPattern>
+                  </relocation>
+                  <!-- dnsjava -->
+                  <relocation>
+                    <pattern>org.xbill</pattern>
+                    <shadedPattern>${shaded.prefix}.org.xbill</shadedPattern>
+                  </relocation>
+                  <!-- netty family -->
+                  <relocation>
+                    <pattern>org.jboss.netty</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jboss.netty</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>io.netty</pattern>
+                    <shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
+                  </relocation>
+                  <!-- top level okio -->
+                  <relocation>
+                    <pattern>okio</pattern>
+                    <shadedPattern>${shaded.prefix}.okio</shadedPattern>
+                  </relocation>
+                  <!-- top level org -->
+                  <relocation>
+                    <pattern>org.checkerframework</pattern>
+                    <shadedPattern>${shaded.prefix}.org.checkerframework</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.codehaus</pattern>
+                    <shadedPattern>${shaded.prefix}.org.codehaus</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.eclipse</pattern>
+                    <shadedPattern>${shaded.prefix}.org.eclipse</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.ehcache</pattern>
+                    <shadedPattern>${shaded.prefix}.org.ehcache</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jcodings</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jcodings</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.joni</pattern>
+                    <shadedPattern>${shaded.prefix}.org.joni</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.mortbay</pattern>
+                    <shadedPattern>${shaded.prefix}.org.mortbay</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.nustaq</pattern>
+                    <shadedPattern>${shaded.prefix}.org.nustaq</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.terracotta</pattern>
+                    <shadedPattern>${shaded.prefix}.org.terracotta</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.tukaani</pattern>
+                    <shadedPattern>${shaded.prefix}.org.tukaani</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.xerial</pattern>
+                    <shadedPattern>${shaded.prefix}.org.xerial</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.znerd</pattern>
+                    <shadedPattern>${shaded.prefix}.org.znerd</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.aopalliance</pattern>
+                    <shadedPattern>${shaded.prefix}.org.aopalliance</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.fusesource</pattern>
+                    <shadedPattern>${shaded.prefix}.org.fusesource</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.iq80</pattern>
+                    <shadedPattern>${shaded.prefix}.org.iq80</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jamon</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jamon</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jets3t</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jets3t</shadedPattern>
+                  </relocation>
+                  <!-- poorly named add-on package from jets3t dependency. TODO can we just exclude these? -->
+                  <relocation>
+                    <pattern>contribs.mx</pattern>
+                    <shadedPattern>${shaded.prefix}.contribs.mx</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.objectweb</pattern>
+                    <shadedPattern>${shaded.prefix}.org.objectweb</shadedPattern>
+                  </relocation>
+                  <!-- org.apache relocations not in org.apache.hadoop or org.apache.commons -->
+                  <relocation>
+                    <pattern>org.apache.avro</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.avro</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.curator</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.curator</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.directory</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.directory</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.http</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.http</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.jasper</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.jasper</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.jute</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.jute</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.kerby</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.kerby</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.taglibs</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.taglibs</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.zookeeper</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.zookeeper</shadedPattern>
+                  </relocation>
+                  <!-- org.apache.commons not including logging -->
+                  <relocation>
+                    <pattern>org.apache.commons.validator</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.validator</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.beanutils</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.beanutils</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.cli</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.cli</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.collections</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.collections</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.configuration</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.configuration</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.crypto</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.crypto</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.csv</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.csv</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.daemon</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.daemon</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.io</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.io</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.math</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.math</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.math3</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.math3</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.net</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.net</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.lang</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.lang3</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang3</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.el</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.el</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.httpclient</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.httpclient</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.compress</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.compress</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.digester</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.digester</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.codec</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.codec</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.text</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.text</shadedPattern>
+                  </relocation>
+                  <!-- top level net-->
+                  <relocation>
+                    <pattern>net/</pattern>
+                    <shadedPattern>${shaded.prefix}.net.</shadedPattern>
+                  </relocation>
 
-                                    <!-- dnsjava -->
-                                    <relocation>
-                                        <pattern>org.xbill</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.xbill</shadedPattern>
-                                    </relocation>
-
-                                    <!-- netty family -->
-                                    <relocation>
-                                        <pattern>org.jboss.netty</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.jboss.netty</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>io.netty</pattern>
-                                        <shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
-                                    </relocation>
-
-                                    <!-- top level okio -->
-                                    <relocation>
-                                        <pattern>okio</pattern>
-                                        <shadedPattern>${shaded.prefix}.okio</shadedPattern>
-                                    </relocation>
-
-                                    <!-- top level org -->
-                                    <relocation>
-                                      <pattern>org.checkerframework</pattern>
-                                      <shadedPattern>${shaded.prefix}.org.checkerframework</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                      <pattern>org.codehaus</pattern>
-                                      <shadedPattern>${shaded.prefix}.org.codehaus</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.eclipse</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.eclipse</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.ehcache</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.ehcache</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.jcodings</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.jcodings</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.joni</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.joni</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.mortbay</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.mortbay</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.nustaq</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.nustaq</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.terracotta</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.terracotta</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.tukaani</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.tukaani</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.xerial</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.xerial</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.znerd</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.znerd</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.aopalliance</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.aopalliance</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.fusesource</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.fusesource</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.iq80</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.iq80</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.jamon</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.jamon</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.jets3t</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.jets3t</shadedPattern>
-                                    </relocation>
-                                    <!-- poorly named add-on package from jets3t dependency. TODO can we just exclude these? -->
-                                    <relocation>
-                                        <pattern>contribs.mx</pattern>
-                                        <shadedPattern>${shaded.prefix}.contribs.mx</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.objectweb</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.objectweb</shadedPattern>
-                                    </relocation>
-
-
-                                    <!-- org.apache relocations not in org.apache.hadoop or org.apache.commons -->
-                                    <relocation>
-                                        <pattern>org.apache.avro</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.avro</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.curator</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.curator</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.directory</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.directory</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.http</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.http</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.jasper</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.jasper</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.jute</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.jute</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.kerby</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.kerby</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.taglibs</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.taglibs</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.zookeeper</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.zookeeper</shadedPattern>
-                                    </relocation>
-
-                                    <!-- org.apache.commons not including logging -->
-                                    <relocation>
-                                        <pattern>org.apache.commons.validator</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.validator</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.beanutils</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.beanutils</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.cli</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.cli</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.collections</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.collections</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.configuration</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.configuration</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.crypto</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.crypto</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.csv</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.csv</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.daemon</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.daemon</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.io</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.io</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.math</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.math</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.math3</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.math3</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.net</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.net</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.lang</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.lang</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.lang3</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.lang3</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.el</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.el</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.httpclient</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.httpclient</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.compress</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.compress</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.digester</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.digester</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.codec</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.codec</shadedPattern>
-                                    </relocation>
-                                    <relocation>
-                                        <pattern>org.apache.commons.text</pattern>
-                                        <shadedPattern>${shaded.prefix}.org.apache.commons.text</shadedPattern>
-                                    </relocation>
-
-                                    <!-- top level net-->
-                                    <relocation>
-                                        <pattern>net/</pattern>
-                                        <shadedPattern>${shaded.prefix}.net.</shadedPattern>
-                                    </relocation>
-
-                                </relocations>
-                                <transformers>
-                                  <!-- Need to filter out some extraneous license files.
-                                       Don't use the ApacheLicenseRT because it just removes all
-                                       META-INF/LICENSE(.txt)? files, including ours. -->
-                                  <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
-                                    <resources>
-                                      <resource>LICENSE.txt</resource>
-                                      <resource>ASL2.0</resource>
+                </relocations>
+                <transformers>
+                  <!-- Need to filter out some extraneous license files.
+                       Don't use the ApacheLicenseRT because it just removes all
+                       META-INF/LICENSE(.txt)? files, including ours. -->
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                    <resources>
+                      <resource>LICENSE.txt</resource>
+                      <resource>ASL2.0</resource>
                                       <!-- also this unneeded doc -->
-                                      <resource>overview.html</resource>
-                                    </resources>
-                                  </transformer>
-                                  <!-- Where notices exist, just concat them -->
-                                  <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
-                                    <addHeader>false</addHeader>
-                                    <projectName>${project.name}</projectName>
-                                  </transformer>
-                                  <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
-                                  </transformer>
-                                </transformers>
-                                <filters>
-                                    <!-- remove utility classes which are not required from dnsjava -->
-                                    <filter>
-                                        <artifact>dnsjava:dnsjava</artifact>
-                                        <excludes>
-                                            <exclude>dig*</exclude>
-                                            <exclude>jnamed*</exclude>
-                                            <exclude>lookup*</exclude>
-                                            <exclude>update*</exclude>
-                                        </excludes>
-                                    </filter>
-                                  <filter>
+                      <resource>overview.html</resource>
+                    </resources>
+                  </transformer>
+                  <!-- Where notices exist, just concat them -->
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
+                    <addHeader>false</addHeader>
+                    <projectName>${project.name}</projectName>
+                  </transformer>
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
+                  </transformer>
+                </transformers>
+                <filters>
+                  <!-- remove utility classes which are not required from dnsjava -->
+                  <filter>
+                    <artifact>dnsjava:dnsjava</artifact>
+                    <excludes>
+                      <exclude>dig*</exclude>
+                      <exclude>jnamed*</exclude>
+                      <exclude>lookup*</exclude>
+                      <exclude>update*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
                                     <!-- this is a signed osgi bundle -->
-                                    <artifact>org.eclipse.jetty.orbit:javax.servlet.jsp.jstl</artifact>
-                                    <excludes>
-                                      <exclude>META-INF/ECLIPSEF.SF</exclude>
-                                      <exclude>META-INF/ECLIPSEF.RSA</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <!-- Duplication of classes that ship in commons-collections 2.x and 3.x
-                                         If we stop bundling a relevant commons-collections artifact we'll
-                                         need to revisit. See: https://s.apache.org/e09o
-                                    -->
-                                    <artifact>commons-beanutils:commons-beanutils-core</artifact>
-                                    <excludes>
-                                      <exclude>org/apache/commons/collections/*.class</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <!-- server side webapps that we don't need -->
-                                    <artifact>org.apache.hadoop:hadoop-yarn-common</artifact>
-                                    <excludes>
-                                      <exclude>webapps/*</exclude>
-                                      <exclude>webapps/**/*</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                      <!-- proto source files aren't needed -->
-                                      <exclude>*.proto</exclude>
-                                      <exclude>**/*.proto</exclude>
-                                      <!-- We already concat NOTICE, safe to drop individual ones -->
-                                      <exclude>LICENSE</exclude>
-                                      <exclude>NOTICE</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <!-- skip french localization -->
-                                    <artifact>org.apache.commons:commons-math3</artifact>
-                                    <excludes>
-                                      <exclude>assets/org/apache/commons/math3/**/*</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <!-- appears to be the result of a conflict in hadoop artifacts -->
-                                    <artifact>org.apache.hadoop:*</artifact>
-                                    <excludes>
-                                      <exclude>mapred-default.xml.orig</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <!-- unnecessary files that mess up our invariants check -->
-                                  <filter>
-                                    <artifact>org.eclipse.jetty:*</artifact>
-                                    <excludes>
-                                      <exclude>about.html</exclude>
-                                      <exclude>jetty-dir.css</exclude>
-                                    </excludes>
-                                  </filter>
-                                  <filter>
-                                    <artifact>org.apache.kerby:*</artifact>
-                                    <excludes>
-                                      <exclude>krb5-template.conf</exclude>
-                                      <exclude>krb5_udp-template.conf</exclude>
-                                      <exclude>ccache.txt</exclude>
-                                      <exclude>keytab.txt</exclude>
-                                    </excludes>
-                                  </filter>
-                                </filters>
-                            </configuration>
-                        </execution>
-                    </executions>
-                </plugin>
-            </plugins>
-        </pluginManagement>
-    </build>
+                    <artifact>org.eclipse.jetty.orbit:javax.servlet.jsp.jstl</artifact>
+                    <excludes>
+                      <exclude>META-INF/ECLIPSEF.SF</exclude>
+                      <exclude>META-INF/ECLIPSEF.RSA</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- Duplication of classes that ship in commons-collections 2.x and 3.x
+                         If we stop bundling a relevant commons-collections artifact we'll
+                         need to revisit. See: https://s.apache.org/e09o
+                    -->
+                    <artifact>commons-beanutils:commons-beanutils-core</artifact>
+                    <excludes>
+                      <exclude>org/apache/commons/collections/*.class</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- server side webapps that we don't need -->
+                    <artifact>org.apache.hadoop:hadoop-yarn-common</artifact>
+                    <excludes>
+                      <exclude>webapps/*</exclude>
+                      <exclude>webapps/**/*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <artifact>*:*</artifact>
+                    <excludes>
+                      <!-- proto source files aren't needed -->
+                      <exclude>*.proto</exclude>
+                      <exclude>**/*.proto</exclude>
+                      <!-- We already concat NOTICE, safe to drop individual ones -->
+                      <exclude>LICENSE</exclude>
+                      <exclude>NOTICE</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- skip french localization -->
+                    <artifact>org.apache.commons:commons-math3</artifact>
+                    <excludes>
+                      <exclude>assets/org/apache/commons/math3/**/*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- appears to be the result of a conflict in hadoop artifacts -->
+                    <artifact>org.apache.hadoop:*</artifact>
+                    <excludes>
+                      <exclude>mapred-default.xml.orig</exclude>
+                    </excludes>
+                  </filter>
+                  <!-- unnecessary files that mess up our invariants check -->
+                  <filter>
+                    <artifact>org.eclipse.jetty:*</artifact>
+                    <excludes>
+                      <exclude>about.html</exclude>
+                      <exclude>jetty-dir.css</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <artifact>org.apache.kerby:*</artifact>
+                    <excludes>
+                      <exclude>krb5-template.conf</exclude>
+                      <exclude>krb5_udp-template.conf</exclude>
+                      <exclude>ccache.txt</exclude>
+                      <exclude>keytab.txt</exclude>
+                    </excludes>
+                  </filter>
+                </filters>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+  </build>
 </project>
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 63db977..6695e76 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -142,13 +142,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
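
The hunk above shows the swap that repeats across the module POMs in this patch: the slf4j-log4j12 binding and the log4j 1.x test dependency are replaced by the log4j2 artifacts. As a consolidated sketch of that recurring test-scope block (artifact roles noted in comments; ordering and surrounding dependencies vary per module, and versions are managed from the root pom):

    <!-- Sketch of the recurring test-scope logging block; versions come from dependencyManagement. -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-api</artifactId><!-- log4j2 logging API -->
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId><!-- log4j2 implementation -->
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId><!-- binds SLF4J calls to log4j2 -->
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-1.2-api</artifactId><!-- bridge for code still calling the log4j 1.x API -->
      <scope>test</scope>
    </dependency>

The log4j-1.2-api bridge keeps third-party code that still calls the old log4j 1.x API working against log4j2 without shipping log4j:log4j itself.
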
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 6be2bfb..2ec5560 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <!--
       /**
        * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,164 +21,176 @@
        * limitations under the License.
        */
       -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-build-configuration</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>3.0.0-SNAPSHOT</version>
-        <relativePath>../hbase-build-configuration</relativePath>
-    </parent>
-    <artifactId>hbase-testing-util</artifactId>
-    <name>Apache HBase - Testing Util</name>
-    <description>HBase Testing Utilities.</description>
-    <dependencies>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-build-configuration</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
+  <artifactId>hbase-testing-util</artifactId>
+  <name>Apache HBase - Testing Util</name>
+  <description>HBase Testing Utilities.</description>
+  <dependencies>
         <!-- Intra-project dependencies -->
         <!-- we do not want to introduce this to downstream users so still set the scope to test -->
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-logging</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-annotations</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>jdk.tools</groupId>
-                    <artifactId>jdk.tools</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-zookeeper</artifactId>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-zookeeper</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-asyncfs</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <type>test-jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jul-to-slf4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-logging</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-asyncfs</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jcl-over-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 
-    <profiles>
-        <!-- Profiles for building against different hadoop versions -->
-        <!-- There are a lot of common dependencies used here, should investigate
-        if we can combine these profiles somehow -->
-        <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
-        <profile>
-            <id>hadoop-3.0</id>
-            <activation>
-                <property><name>!hadoop.profile</name></property>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                    <exclusions>
-                      <exclusion>
-                         <groupId>javax.xml.bind</groupId>
-                         <artifactId>jaxb-api</artifactId>
-                      </exclusion>
-                      <exclusion>
-                       <groupId>javax.ws.rs</groupId>
-                       <artifactId>jsr311-api</artifactId>
-                      </exclusion>
-                    </exclusions>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-minicluster</artifactId>
-                    <scope>compile</scope>
-                    <exclusions>
-                      <exclusion>
-                        <groupId>com.google.guava</groupId>
-                        <artifactId>guava</artifactId>
-                      </exclusion>
-                      <exclusion>
-                       <groupId>javax.ws.rs</groupId>
-                       <artifactId>jsr311-api</artifactId>
-                      </exclusion>
-                    </exclusions>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-minikdc</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
+  <profiles>
+    <!-- Profiles for building against different hadoop versions -->
+    <!-- There are a lot of common dependencies used here, should investigate
+         if we can combine these profiles somehow -->
+    <!-- Profile for building against Hadoop 3.0.0. Activate by default -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.ws.rs</groupId>
+              <artifactId>jsr311-api</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.ws.rs</groupId>
+              <artifactId>jsr311-api</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minikdc</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 6f8d7ba..fca2aa6 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -253,13 +253,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 966ef5b..6270d0e 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -166,13 +166,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/pom.xml b/pom.xml
index a66ed58..0f5c6fb 100755
--- a/pom.xml
+++ b/pom.xml
@@ -865,7 +865,7 @@
               </goals>
               <configuration>
                 <excludes>
-                  <exclude>log4j.properties</exclude>
+                  <exclude>log4j2.xml</exclude>
                 </excludes>
               </configuration>
             </execution>
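
The exclusion above tracks the rename of the bundled logging configuration from log4j.properties to log4j2.xml. The new file is XML-based; the following is only a minimal console-appender sketch of that format, not the conf/log4j2.xml actually shipped by this commit, whose appenders, patterns, and logger levels may differ.

<?xml version="1.0" encoding="UTF-8"?>
<!-- Minimal console-only sketch; not the exact file shipped as conf/log4j2.xml. -->
<Configuration status="WARN">
  <Appenders>
    <Console name="Console" target="SYSTEM_ERR">
      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %m%n"/>
    </Console>
  </Appenders>
  <Loggers>
    <Root level="INFO">
      <AppenderRef ref="Console"/>
    </Root>
  </Loggers>
</Configuration>

Additional appenders (for example a RollingFile) and per-package Logger elements would be declared the same way under Appenders and Loggers respectively.
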
@@ -1180,6 +1180,42 @@
             </configuration>
           </execution>
           <execution>
+            <id>banned-log4j</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
+                    <exclude>log4j:log4j</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow log4j dependencies, as we now use log4j2
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
+            <id>banned-slf4j-log4j12</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
+                    <exclude>org.slf4j:slf4j-log4j12</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow the slf4j-log4j12 dependency, as we now use log4j-slf4j-impl
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
             <id>banned-jetty</id>
             <goals>
               <goal>enforce</goal>
@@ -1267,16 +1303,18 @@
                   <reason>Use SLF4j for logging</reason>
                   <bannedImports>
                     <bannedImport>org.apache.commons.logging.**</bannedImport>
+                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
                 <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                   <includeTestCode>false</includeTestCode>
                   <commentLineBufferSize>512</commentLineBufferSize>
                   <reason>
-                    Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+                    Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details.
                   </reason>
                   <bannedImports>
-                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
                 <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
@@ -1653,7 +1691,7 @@
     <junit.version>4.13</junit.version>
     <hamcrest.version>1.3</hamcrest.version>
     <htrace.version>4.2.0-incubating</htrace.version>
-    <log4j.version>1.2.17</log4j.version>
+    <log4j2.version>2.14.1</log4j2.version>
     <mockito-core.version>2.28.2</mockito-core.version>
     <protobuf.plugin.version>0.6.1</protobuf.plugin.version>
     <thrift.path>thrift</thrift.path>
@@ -2050,8 +2088,8 @@
       </dependency>
       <!--
         Logging dependencies. In general, we use slf4j as the log facade in HBase, so all sub
-        modules should depend on slf4j-api at compile scope, and then depend on slf4j-log4j12
-        and log4j at test scope(and in hbase-assembly when shipping the binary) to redirect the
+        modules should depend on slf4j-api at compile scope, and then depend on log4j-slf4j-impl
+        and log4j2 at test scope (and in hbase-assembly when shipping the binary) to redirect the
         log message to log4j. Do not introduce logging dependencies other than slf4j-api at compile
         scope as it will mess up the logging framework for downstream users.
         Here we also depend on jcl-over-slf4j and jul-to-slf4j, as some of the libraries we depend
@@ -2063,6 +2101,7 @@
         <artifactId>jettison</artifactId>
         <version>${jettison.version}</version>
       </dependency>
+      <!-- Logging -->
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -2070,11 +2109,6 @@
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
         <artifactId>jcl-over-slf4j</artifactId>
         <version>${slf4j.version}</version>
       </dependency>
@@ -2084,9 +2118,24 @@
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>${log4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-api</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-core</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       <!-- Avro dependencies we mostly get transitively, manual version coallescing -->
       <dependency>
@@ -2094,8 +2143,6 @@
         <artifactId>avro</artifactId>
         <version>${avro.version}</version>
       </dependency>
-      <!--This is not used by hbase directly.  Used by thrift,
-          dropwizard and zk.-->
       <dependency>
         <groupId>com.github.ben-manes.caffeine</groupId>
         <artifactId>caffeine</artifactId>
@@ -2773,6 +2820,46 @@
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-app</artifactId>
+           <version>${hadoop-three.version}</version>
+           <type>test-jar</type>
+           <exclusions>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-mapper-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-core-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-jaxrs</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-xc</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.xml.bind</groupId>
+               <artifactId>jaxb-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.ws.rs</groupId>
+               <artifactId>jsr311-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.slf4j</groupId>
+               <artifactId>slf4j-log4j12</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>log4j</groupId>
+               <artifactId>log4j</artifactId>
+             </exclusion>
+           </exclusions>
+         </dependency>
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
@@ -2792,10 +2879,6 @@
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
              </exclusion>
-             <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-             </exclusion>
            </exclusions>
          </dependency>
          <dependency>
@@ -2821,10 +2904,6 @@
                 <groupId>org.slf4j</groupId>
                 <artifactId>slf4j-log4j12</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>log4j</groupId>
-                <artifactId>log4j</artifactId>
-              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
@@ -3051,6 +3130,14 @@
                <groupId>com.google.code.findbugs</groupId>
                <artifactId>jsr305</artifactId>
              </exclusion>
+             <exclusion>
+               <groupId>org.slf4j</groupId>
+               <artifactId>slf4j-log4j12</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>log4j</groupId>
+               <artifactId>log4j</artifactId>
+             </exclusion>
            </exclusions>
          </dependency>
          <dependency>
@@ -3126,10 +3213,6 @@
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
              </exclusion>
-             <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-             </exclusion>
            </exclusions>
          </dependency>
          <dependency>