Posted to commits@hive.apache.org by pr...@apache.org on 2015/08/14 19:23:32 UTC

[3/3] hive git commit: HIVE-11304: Migrate to Log4j2 from Log4j 1.x (Prasanth Jayachandran reviewed by Thejas Nair, Sergey Shelukhin)

HIVE-11304: Migrate to Log4j2 from Log4j 1.x (Prasanth Jayachandran reviewed by Thejas Nair, Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c93d6c77
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c93d6c77
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c93d6c77

Branch: refs/heads/master
Commit: c93d6c77e31e2eb9b40f5167ab3491d44eae351a
Parents: a4849cb
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Fri Aug 14 10:17:20 2015 -0700
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Fri Aug 14 10:17:20 2015 -0700

----------------------------------------------------------------------
 accumulo-handler/pom.xml                        |   4 -
 .../src/main/resources/beeline-log4j.properties |  24 --
 beeline/src/main/resources/beeline-log4j2.xml   |  40 ++++
 bin/ext/beeline.sh                              |   2 +-
 bin/hive                                        |   3 +
 common/pom.xml                                  |  22 +-
 .../org/apache/hadoop/hive/common/LogUtils.java |  18 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   8 +-
 common/src/main/resources/hive-log4j.properties |  88 --------
 common/src/main/resources/hive-log4j2.xml       | 111 +++++++++
 .../hadoop/hive/conf/TestHiveLogging.java       |   8 +-
 .../resources/hive-exec-log4j-test.properties   |  59 -----
 .../test/resources/hive-exec-log4j2-test.xml    |  86 +++++++
 .../test/resources/hive-log4j-test.properties   |  71 ------
 common/src/test/resources/hive-log4j2-test.xml  |  95 ++++++++
 data/conf/hive-log4j-old.properties             |  82 -------
 data/conf/hive-log4j.properties                 |  97 --------
 data/conf/hive-log4j2.xml                       | 148 ++++++++++++
 data/conf/spark/log4j.properties                |  24 --
 data/conf/spark/log4j2.xml                      |  74 ++++++
 docs/xdocs/language_manual/cli.xml              |   2 +-
 hcatalog/bin/hcat_server.sh                     |   2 +-
 hcatalog/bin/templeton.cmd                      |   4 +-
 hcatalog/scripts/hcat_server_start.sh           |   2 +-
 .../content/xdocs/configuration.xml             |   2 +-
 .../src/documentation/content/xdocs/install.xml |   2 +-
 .../deployers/config/hive/hive-log4j.properties |  88 --------
 .../deployers/config/hive/hive-log4j2.xml       | 111 +++++++++
 .../templeton/deployers/start_hive_services.sh  |   2 +-
 .../webhcat/svr/src/main/bin/webhcat_server.sh  |   4 +-
 .../src/main/config/webhcat-log4j.properties    |  45 ----
 .../svr/src/main/config/webhcat-log4j2.xml      |  75 +++++++
 .../main/java/org/apache/hive/hplsql/Exec.java  |   2 +
 .../operation/TestOperationLoggingLayout.java   | 136 +++++++++++
 itests/pom.xml                                  |   2 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |   2 +-
 .../metastore/txn/TestCompactionTxnHandler.java |  40 +++-
 .../hive/metastore/txn/TestTxnHandler.java      |  66 ++++--
 packaging/src/main/assembly/bin.xml             |  14 +-
 pom.xml                                         |  37 ++-
 ql/pom.xml                                      |  17 +-
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |  29 ++-
 .../hive/ql/exec/mr/HadoopJobExecHelper.java    |  20 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |  20 +-
 .../hadoop/hive/ql/log/HiveEventCounter.java    | 135 +++++++++++
 .../apache/hadoop/hive/ql/log/NullAppender.java |  63 ++++++
 .../ql/log/PidDailyRollingFileAppender.java     |  33 ---
 .../hive/ql/log/PidFilePatternConverter.java    |  62 ++++++
 .../main/resources/hive-exec-log4j.properties   |  77 -------
 ql/src/main/resources/hive-exec-log4j2.xml      | 110 +++++++++
 .../hadoop/hive/ql/log/TestLog4j2Appenders.java |  95 ++++++++
 .../hadoop/hive/ql/metadata/StringAppender.java | 128 +++++++++++
 .../hadoop/hive/ql/metadata/TestHive.java       |  50 +++--
 .../hive/service/cli/CLIServiceUtils.java       |   7 -
 .../cli/operation/LogDivertAppender.java        | 223 +++++++++++--------
 .../service/cli/operation/OperationManager.java |  17 +-
 shims/common/pom.xml                            |  17 +-
 .../hadoop/hive/shims/HiveEventCounter.java     | 102 ---------
 .../src/test/resources/log4j.properties         |  23 --
 spark-client/src/test/resources/log4j2.xml      |  39 ++++
 storage-api/pom.xml                             |   7 -
 testutils/ptest2/pom.xml                        |  20 ++
 .../ptest2/src/main/resources/log4j.properties  |  37 ---
 testutils/ptest2/src/main/resources/log4j2.xml  |  79 +++++++
 64 files changed, 1989 insertions(+), 1123 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/accumulo-handler/pom.xml
----------------------------------------------------------------------
diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml
index ee40f72..4e3a087 100644
--- a/accumulo-handler/pom.xml
+++ b/accumulo-handler/pom.xml
@@ -91,10 +91,6 @@
       <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-    </dependency>
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/beeline/src/main/resources/beeline-log4j.properties
----------------------------------------------------------------------
diff --git a/beeline/src/main/resources/beeline-log4j.properties b/beeline/src/main/resources/beeline-log4j.properties
deleted file mode 100644
index fe47d94..0000000
--- a/beeline/src/main/resources/beeline-log4j.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=WARN, console
-
-######## console appender ########
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/beeline/src/main/resources/beeline-log4j2.xml
----------------------------------------------------------------------
diff --git a/beeline/src/main/resources/beeline-log4j2.xml b/beeline/src/main/resources/beeline-log4j2.xml
new file mode 100644
index 0000000..5f09741
--- /dev/null
+++ b/beeline/src/main/resources/beeline-log4j2.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="BeelineLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">WARN</Property>
+    <Property name="hive.root.logger">console</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+    </Root>
+  </Loggers>
+
+</Configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/bin/ext/beeline.sh
----------------------------------------------------------------------
diff --git a/bin/ext/beeline.sh b/bin/ext/beeline.sh
index ab3dc1a..9de8f6c 100644
--- a/bin/ext/beeline.sh
+++ b/bin/ext/beeline.sh
@@ -31,7 +31,7 @@ beeline () {
     hadoopClasspath="${HADOOP_CLASSPATH}:"
   fi
   export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}"
-  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties "
+  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.xml "
 
   exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@"
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/bin/hive
----------------------------------------------------------------------
diff --git a/bin/hive b/bin/hive
index 5dc93fb..ad7139e 100755
--- a/bin/hive
+++ b/bin/hive
@@ -193,6 +193,9 @@ if [ "$HADOOP_HOME" == "" ]; then
   exit 4;
 fi
 
+# to avoid errors from log4j2 automatic configuration loading
+export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=hive-log4j2.xml "
+
 HADOOP=$HADOOP_HOME/bin/hadoop
 if [ ! -f ${HADOOP} ]; then
   echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index a7997e2..dba814d 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -66,14 +66,24 @@
       <version>${joda.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j-extras.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-web</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 9118675..3ca5c0f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -18,26 +18,22 @@
 
 package org.apache.hadoop.hive.common;
 
-import java.net.URL;
 import java.io.File;
-import java.io.IOException;
-import java.io.FileNotFoundException;
+import java.net.URL;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.PropertyConfigurator;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.logging.log4j.core.config.Configurator;
 
 /**
  * Utilities common to logging operations.
  */
 public class LogUtils {
 
-  private static final String HIVE_L4J = "hive-log4j.properties";
-  private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
+  private static final String HIVE_L4J = "hive-log4j2.xml";
+  private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml";
   private static final Log l4j = LogFactory.getLog(LogUtils.class);
 
   @SuppressWarnings("serial")
@@ -95,8 +91,7 @@ public class LogUtils {
           }
           System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
         }
-        LogManager.resetConfiguration();
-        PropertyConfigurator.configure(log4jFileName);
+        Configurator.initialize(null, log4jFileName);
         logConfigLocation(conf);
         return ("Logging initialized using configuration in " + log4jConfigFile);
       }
@@ -123,8 +118,7 @@ public class LogUtils {
         break;
     }
     if (hive_l4j != null) {
-      LogManager.resetConfiguration();
-      PropertyConfigurator.configure(hive_l4j);
+      Configurator.initialize(null, hive_l4j.toString());
       logConfigLocation(conf);
       return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j);
     } else {
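
For comparison with the Log4j 1.x calls removed above, a minimal standalone sketch of
the Log4j2 API that LogUtils now relies on; the class name and configuration location
below are hypothetical, not taken from the patch:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.core.config.Configurator;

    public class Log4j2InitSketch {
      public static void main(String[] args) {
        // Configurator.initialize stands in for the old LogManager.resetConfiguration()
        // plus PropertyConfigurator.configure() pair: it builds the logger context from
        // the given configuration location (file path or URI).
        Configurator.initialize(null, "file:///tmp/my-log4j2.xml");
        Logger log = LogManager.getLogger(Log4j2InitSketch.class);
        log.info("Logging initialized using configuration in {}", "file:///tmp/my-log4j2.xml");
      }
    }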

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 11b9f78..730f5be 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1696,13 +1696,13 @@ public class HiveConf extends Configuration {
     // logging configuration
     HIVE_LOG4J_FILE("hive.log4j.file", "",
         "Hive log4j configuration file.\n" +
-        "If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath.\n" +
-        "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
+        "If the property is not set, then logging will be initialized using hive-log4j2.xml found on the classpath.\n" +
+        "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" +
         "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
     HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", "",
         "Hive log4j configuration file for execution mode(sub command).\n" +
-        "If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath.\n" +
-        "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
+        "If the property is not set, then logging will be initialized using hive-exec-log4j2.xml found on the classpath.\n" +
+        "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" +
         "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
 
     HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/main/resources/hive-log4j.properties
----------------------------------------------------------------------
diff --git a/common/src/main/resources/hive-log4j.properties b/common/src/main/resources/hive-log4j.properties
deleted file mode 100644
index 14fa725..0000000
--- a/common/src/main/resources/hive-log4j.properties
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=INFO,DRFA
-hive.log.dir=${java.io.tmpdir}/${user.name}
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=${hive.log.threshold}
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/main/resources/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/common/src/main/resources/hive-log4j2.xml b/common/src/main/resources/hive-log4j2.xml
new file mode 100644
index 0000000..31b8fcc
--- /dev/null
+++ b/common/src/main/resources/hive-log4j2.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.log.file">hive.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+    </File> -->
+
+    <!-- Daily Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+    <!-- Size based Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- <RollingFile name="RFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256 MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10"/>
+    </RollingFile> -->
+
+    <!-- HiveEventCounter appender is loaded from Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Metadata" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
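
The ${sys:...} lookups in the configuration above read JVM system properties, so setting
those properties before Log4j2 initializes changes where and how verbosely Hive logs.
A short sketch under that assumption; the property values and the config path are
hypothetical, not taken from the patch:

    import org.apache.logging.log4j.core.config.Configurator;

    public class HiveLoggerOverrideSketch {
      public static void main(String[] args) {
        // Must be set before Log4j2 reads the configuration.
        System.setProperty("hive.root.logger", "console"); // route root output to the console appender
        System.setProperty("hive.log.level", "DEBUG");     // log more verbosely
        System.setProperty("hive.log.threshold", "ALL");
        Configurator.initialize(null, "/tmp/hive-log4j2.xml"); // local copy of the config above
      }
    }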

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
index d5cedb1..92269e7 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -21,12 +21,12 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.InputStreamReader;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.common.util.HiveTestUtils;
 
+import junit.framework.TestCase;
+
 /**
  * TestHiveLogging
  *
@@ -104,9 +104,9 @@ public class TestHiveLogging extends TestCase {
     // customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log
     File customLogPath = new File(new File(System.getProperty("test.tmp.dir")),
         System.getProperty("user.name") + "-TestHiveLogging/");
-    String customLogName = "hiveLog4jTest.log";
+    String customLogName = "hiveLog4j2Test.log";
     File customLogFile = new File(customLogPath, customLogName);
     RunTest(customLogFile,
-      "hive-log4j-test.properties", "hive-exec-log4j-test.properties");
+      "hive-log4j2-test.xml", "hive-exec-log4j2-test.xml");
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-exec-log4j-test.properties
----------------------------------------------------------------------
diff --git a/common/src/test/resources/hive-exec-log4j-test.properties b/common/src/test/resources/hive-exec-log4j-test.properties
deleted file mode 100644
index 1e53f26..0000000
--- a/common/src/test/resources/hive-exec-log4j-test.properties
+++ /dev/null
@@ -1,59 +0,0 @@
-# Define some default values that can be overridden by system properties
-hive.root.logger=INFO,FA
-hive.log.dir=/${test.tmp.dir}/${user.name}-TestHiveLogging
-hive.log.file=hiveExecLog4jTest.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# File Appender
-#
-
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
-
-
-log4j.category.DataNucleus=ERROR,FA
-log4j.category.Datastore=ERROR,FA
-log4j.category.Datastore.Schema=ERROR,FA
-log4j.category.JPOX.Datastore=ERROR,FA
-log4j.category.JPOX.Plugin=ERROR,FA
-log4j.category.JPOX.MetaData=ERROR,FA
-log4j.category.JPOX.Query=ERROR,FA
-log4j.category.JPOX.General=ERROR,FA
-log4j.category.JPOX.Enhancer=ERROR,FA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-exec-log4j2-test.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/hive-exec-log4j2-test.xml b/common/src/test/resources/hive-exec-log4j2-test.xml
new file mode 100644
index 0000000..b5f2cb4
--- /dev/null
+++ b/common/src/test/resources/hive-exec-log4j2-test.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveExecLog4j2Test"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">FA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging</Property>
+    <Property name="hive.log.file">hiveExecLog4j2Test.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n" />
+    </File>
+
+    <!-- HiveEventCounter appender is loaded from Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Metadata" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-log4j-test.properties
----------------------------------------------------------------------
diff --git a/common/src/test/resources/hive-log4j-test.properties b/common/src/test/resources/hive-log4j-test.properties
deleted file mode 100644
index 0348325..0000000
--- a/common/src/test/resources/hive-log4j-test.properties
+++ /dev/null
@@ -1,71 +0,0 @@
-# Define some default values that can be overridden by system properties
-hive.root.logger=WARN,DRFA
-hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging
-hive.log.file=hiveLog4jTest.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-log4j2-test.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/hive-log4j2-test.xml b/common/src/test/resources/hive-log4j2-test.xml
new file mode 100644
index 0000000..63b46c8
--- /dev/null
+++ b/common/src/test/resources/hive-log4j2-test.xml
@@ -0,0 +1,95 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2Test"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">WARN</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging</Property>
+    <Property name="hive.log.file">hiveLog4j2Test.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+    <!-- HiveEventCounter appender is loaded from Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Metadata" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j-old.properties
----------------------------------------------------------------------
diff --git a/data/conf/hive-log4j-old.properties b/data/conf/hive-log4j-old.properties
deleted file mode 100644
index f274b8c..0000000
--- a/data/conf/hive-log4j-old.properties
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=${build.dir.hive}/ql/tmp/
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j.properties
----------------------------------------------------------------------
diff --git a/data/conf/hive-log4j.properties b/data/conf/hive-log4j.properties
deleted file mode 100644
index 023e3c2..0000000
--- a/data/conf/hive-log4j.properties
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=${test.tmp.dir}/log/
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
-# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
-# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
-# Add "DAILY" to hive.root.logger above if you want to use this.
-log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
-log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
-log4j.appender.DAILY.rollingPolicy.ActiveFileName=${hive.log.dir}/${hive.log.file}
-log4j.appender.DAILY.rollingPolicy.FileNamePattern=${hive.log.dir}/${hive.log.file}.%d{yyyy-MM-dd}
-log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
-log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
-log4j.logger.org.apache.zookeeper=INFO,DRFA
-log4j.logger.org.apache.zookeeper.server.ServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocket=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
-log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=${hive.ql.log.PerfLogger.level}
-log4j.logger.org.apache.hadoop.hive.ql.exec.Operator=INFO,DRFA
-log4j.logger.org.apache.hadoop.hive.serde2.lazy=INFO,DRFA
-log4j.logger.org.apache.hadoop.hive.metastore.ObjectStore=INFO,DRFA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/data/conf/hive-log4j2.xml b/data/conf/hive-log4j2.xml
new file mode 100644
index 0000000..c9adfa2
--- /dev/null
+++ b/data/conf/hive-log4j2.xml
@@ -0,0 +1,148 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">DEBUG</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/log</Property>
+    <Property name="hive.log.file">hive.log</Property>
+    <Property name="hive.ql.log.PerfLogger.level">INFO</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+    </File> -->
+
+    <!-- Daily Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+    <!-- Size based Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- <RollingFile name="RFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256 MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10"/>
+    </RollingFile> -->
+
+    <!-- HiveEventCounter appender is loaded from Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="{sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <Logger name="org.apache.hadoop.conf.Configuration" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.server.ServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocket" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.ql.log.PerfLogger" level="${sys:hive.ql.log.PerfLogger.level}">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.ql.exec.Operator" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.serde2.lazy" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.metastore.ObjectStore" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Metadata" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/spark/log4j.properties
----------------------------------------------------------------------
diff --git a/data/conf/spark/log4j.properties b/data/conf/spark/log4j.properties
deleted file mode 100644
index 8838c0e..0000000
--- a/data/conf/spark/log4j.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-log4j.rootCategory=DEBUG, DRFA
-
-hive.spark.log.dir=target/tmp/log
-# Settings to quiet third party logs that are too verbose
-log4j.logger.org.eclipse.jetty=WARN
-log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.spark.log.dir}/spark.log
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/spark/log4j2.xml
----------------------------------------------------------------------
diff --git a/data/conf/spark/log4j2.xml b/data/conf/spark/log4j2.xml
new file mode 100644
index 0000000..395a2bf
--- /dev/null
+++ b/data/conf/spark/log4j2.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="SparkLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="spark.log.level">DEBUG</Property>
+    <Property name="spark.root.logger">DRFA</Property>
+    <Property name="spark.log.dir">target/tmp/log</Property>
+    <Property name="spark.log.file">spark.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <!-- NOTE: if enabling multiple file appenders, make sure to use different file names -->
+    <!-- <File name="FA" fileName="${sys:spark.log.dir}/${sys:spark.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+    </File> -->
+
+    <!-- Daily Rolling File Appender -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions -->
+    <RollingFile name="DRFA" fileName="${sys:spark.log.dir}/${sys:spark.log.file}"
+     filePattern="${sys:spark.log.dir}/${sys:spark.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+  </Appenders>
+
+  <Loggers>
+    <Root level="DEBUG">
+      <AppenderRef ref="${sys:spark.root.logger}" level="${sys:spark.log.level}"/>
+    </Root>
+
+    <Logger name="org.apache.spark.repl.SparkIMain$exprTyper" level="INFO">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.spark.repl.SparkILoop$SparkILoopInterpreter" level="INFO">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.eclipse.jetty" level="WARN">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.eclipse.jetty.util.component.AbstractLifeCycle" level="ERROR">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
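
The Property declarations in this file act as defaults for the ${sys:...} lookups, so the target appender, level, and directory of the Spark test logs can be changed per run by setting the matching JVM system properties on whichever process loads the file. A minimal sketch, values illustrative only:

    -Dspark.root.logger=console -Dspark.log.level=INFO -Dspark.log.dir=/tmp/spark-test-logs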

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/docs/xdocs/language_manual/cli.xml
----------------------------------------------------------------------
diff --git a/docs/xdocs/language_manual/cli.xml b/docs/xdocs/language_manual/cli.xml
index a293382..eb91e44 100644
--- a/docs/xdocs/language_manual/cli.xml
+++ b/docs/xdocs/language_manual/cli.xml
@@ -163,7 +163,7 @@ Sample Usage:
 
 <section name="Logging" href="logging">
 <p>
-Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use <i>hive-log4j.default</i> in the <i>conf/</i> directory of the hive installation which writes out logs to <i>/tmp/$USER/hive.log</i> and uses the <i>WARN</i> level.
+Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use <i>hive-log4j2.xml</i> in the <i>conf/</i> directory of the hive installation which writes out logs to <i>/tmp/$USER/hive.log</i> and uses the <i>WARN</i> level.
 </p>
 <p>
 It is often desirable to emit the logs to the standard output and/or change the logging level for debugging purposes. These can be done from the command line as follows: </p>
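
For instance, with the new hive-log4j2.xml the target appender and level are resolved through the ${sys:hive.root.logger} and ${sys:hive.log.level} lookups, so they can be overridden as plain JVM system properties. A sketch, assuming Hive is launched through bin/hive (which runs under hadoop and therefore picks up HADOOP_CLIENT_OPTS); the values are illustrative:

    HADOOP_CLIENT_OPTS="-Dhive.root.logger=console -Dhive.log.level=DEBUG" hive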

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/bin/hcat_server.sh
----------------------------------------------------------------------
diff --git a/hcatalog/bin/hcat_server.sh b/hcatalog/bin/hcat_server.sh
index 6b09d3e..d1aecb8 100644
--- a/hcatalog/bin/hcat_server.sh
+++ b/hcatalog/bin/hcat_server.sh
@@ -84,7 +84,7 @@ function start_hcat() {
   export AUX_CLASSPATH=${AUX_CLASSPATH}
 
   export HADOOP_HOME=$HADOOP_HOME
-  #export HADOOP_OPTS="-Dlog4j.configuration=file://${HCAT_PREFIX}/conf/log4j.properties"
+  #export HADOOP_OPTS="-Dlog4j.configurationFile=file://${HCAT_PREFIX}/conf/log4j2.xml"
   export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${HCAT_LOG_DIR}/hcat_err_pid%p.log -Xloggc:${HCAT_LOG_DIR}/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
   export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-2048} # 8G is better if you have it
   export METASTORE_PORT=${METASTORE_PORT:-9083}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/bin/templeton.cmd
----------------------------------------------------------------------
diff --git a/hcatalog/bin/templeton.cmd b/hcatalog/bin/templeton.cmd
index e9a735d..759f654 100644
--- a/hcatalog/bin/templeton.cmd
+++ b/hcatalog/bin/templeton.cmd
@@ -59,9 +59,9 @@ setlocal enabledelayedexpansion
 
   if not defined TEMPLETON_LOG4J (
     @rem must be prefixed with file: otherwise config is not picked up
-    set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j.properties
+    set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j2.xml
   )
-  set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configuration=%TEMPLETON_LOG4J% %HADOOP_OPTS%
+  set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configurationFile=%TEMPLETON_LOG4J% %HADOOP_OPTS%
   set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hive.hcatalog.templeton.Main
   
   if defined service_entry (

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/scripts/hcat_server_start.sh
----------------------------------------------------------------------
diff --git a/hcatalog/scripts/hcat_server_start.sh b/hcatalog/scripts/hcat_server_start.sh
index 1670b70..872d1b5 100755
--- a/hcatalog/scripts/hcat_server_start.sh
+++ b/hcatalog/scripts/hcat_server_start.sh
@@ -70,7 +70,7 @@ export AUX_CLASSPATH=${AUX_CLASSPATH}
 
 
 export HADOOP_HOME=$HADOOP_HOME
-#export HADOOP_OPTS="-Dlog4j.configuration=file://${ROOT}/conf/log4j.properties"
+#export HADOOP_OPTS="-Dlog4j.configurationFile=file://${ROOT}/conf/log4j2.xml"
 export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${ROOT}/var/log/hcat_err_pid%p.log -Xloggc:${ROOT}/var/log/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
 export HADOOP_HEAPSIZE=2048 # 8G is better if you have it
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
----------------------------------------------------------------------
diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
index 9757b9c..6385e40 100644
--- a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
+++ b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
@@ -66,7 +66,7 @@ ${env.PIG_HOME}/bin/pig
      uncertainty.</p>
 
   <p><strong>Note:</strong> The location of the log files created by Templeton and some other properties
-     of the logging system are set in the webhcat-log4j.properties file.</p>
+     of the logging system are set in the webhcat-log4j2.xml file.</p>
 
   <section>
   <title>Variables</title>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
----------------------------------------------------------------------
diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
index 16da248..e2953a9 100644
--- a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
+++ b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
@@ -241,7 +241,7 @@
 
     <p>Server activity logs are located in
     <em>root</em><code>/var/log/hcat_server</code>.  Logging configuration is located at
-    <em>root</em><code>/conf/log4j.properties</code>.  Server logging uses
+    <em>root</em><code>/conf/log4j2.xml</code>.  Server logging uses
     <code>DailyRollingFileAppender</code> by default. It will generate a new
     file per day and does not expire old log files automatically.</p>
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
----------------------------------------------------------------------
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
deleted file mode 100644
index 82684b3..0000000
--- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=/tmp/ekoifman
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=${hive.log.threshold}
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml
new file mode 100644
index 0000000..30f7603
--- /dev/null
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HCatE2ELog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">DEBUG</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.log.file">hive.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <!-- NOTE: if enabling multiple file appenders, make sure to use different file names -->
+    <!-- <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+    </File> -->
+
+    <!-- Daily Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appenders, make sure to use different file names -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+    <!-- Size based Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appenders, make sure to use different file names -->
+    <!-- <RollingFile name="RFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256 MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10"/>
+    </RollingFile> -->
+
+    <!-- HiveEventCounter appender is loaded from the Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Metadata" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
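
Compared with the deleted properties file, the log directory is no longer hard-coded to a per-developer /tmp path: it defaults to ${sys:java.io.tmpdir}/${sys:user.name} (typically /tmp/<user>/hive.log) and, like the other values in the Properties block, can be redirected per run. A sketch with illustrative values, again assuming Hive is started through bin/hive so that HADOOP_CLIENT_OPTS reaches the JVM:

    export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dhive.log.dir=/var/log/hive-e2e -Dhive.log.level=INFO"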

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
----------------------------------------------------------------------
diff --git a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
index 8cc9353..e59177c 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
@@ -31,7 +31,7 @@ cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.
 #cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml ${HIVE_HOME}/conf/hive-site.xml
 
 cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml
-cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties ${HIVE_HOME}/conf/hive-log4j.properties
+cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml ${HIVE_HOME}/conf/hive-log4j2.xml
 
 if [ -f ${MYSQL_CLIENT_JAR} ]; then
   cp ${MYSQL_CLIENT_JAR} ${HIVE_HOME}/lib

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
index 0be8dde..c80fdd5 100644
--- a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
+++ b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
@@ -215,11 +215,11 @@ else
 fi
 
 if [[ -z "$WEBHCAT_LOG4J" ]]; then
-  WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j.properties";
+  WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j2.xml";
 fi
 
 export HADOOP_USER_CLASSPATH_FIRST=true
-export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configuration=$WEBHCAT_LOG4J"
+export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configurationFile=$WEBHCAT_LOG4J"
 
 start_cmd="$HADOOP_PREFIX/bin/hadoop jar $JAR org.apache.hive.hcatalog.templeton.Main  "
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties
deleted file mode 100644
index 866052c..0000000
--- a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties
+++ /dev/null
@@ -1,45 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-webhcat.root.logger = INFO, standard
-webhcat.log.dir = .
-webhcat.log.file = webhcat.log
-
-log4j.rootLogger = ${webhcat.root.logger}
-
-# Logging Threshold
-log4j.threshhold = DEBUG
-
-log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender
-log4j.appender.standard.File = ${webhcat.log.dir}/${webhcat.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern = .yyyy-MM-dd
-
-log4j.appender.DRFA.layout = org.apache.log4j.PatternLayout
-
-log4j.appender.standard.layout = org.apache.log4j.PatternLayout
-log4j.appender.standard.layout.conversionPattern = %-5p | %d{DATE} | %c | %m%n
-
-# Class logging settings
-log4j.logger.com.sun.jersey = DEBUG
-log4j.logger.com.sun.jersey.spi.container.servlet.WebComponent = ERROR
-log4j.logger.org.apache.hadoop = INFO
-log4j.logger.org.apache.hadoop.conf = WARN
-log4j.logger.org.apache.zookeeper = WARN
-log4j.logger.org.eclipse.jetty = INFO

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml
new file mode 100644
index 0000000..40da974
--- /dev/null
+++ b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="WebhcatLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="webhcat.log.threshold">ALL</Property>
+    <Property name="webhcat.log.level">INFO</Property>
+    <Property name="webhcat.root.logger">standard</Property>
+    <Property name="webhcat.log.dir">.</Property>
+    <Property name="webhcat.log.file">webhcat.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions -->
+    <RollingFile name="standard" fileName="${sys:webhcat.log.dir}/${sys:webhcat.log.file}"
+     filePattern="${sys:webhcat.log.dir}/${sys:webhcat.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%-5p | %d{DATE} | %c | %m%n" />
+      <Policies>
+        <!-- Rollover at midnight (interval = 1 means daily) -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <!-- <DefaultRolloverStrategy max="30"/> -->
+    </RollingFile>
+
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:webhcat.log.threshold}">
+      <AppenderRef ref="${sys:webhcat.root.logger}" level="${sys:webhcat.log.level}"/>
+    </Root>
+
+    <Logger name="com.sun.jersey" level="DEBUG">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="com.sun.jersey.spi.container.servlet.WebComponent" level="ERROR">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop" level="INFO">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.conf" level="WARN">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper" level="WARN">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.eclipse.jetty" level="INFO">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index 73f470c..268c218 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -670,6 +670,8 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
     if (!parseArguments(args)) {
       return 1;
     }
+    // specify the default log4j2 properties file.
+    System.setProperty("log4j.configurationFile", "hive-log4j2.xml");
     conf = new Conf();
     conf.init();    
     conn = new Conn(this);
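
Note that System.setProperty replaces any log4j.configurationFile value already passed on the command line, so (unless logging was initialized earlier in the JVM) HPL/SQL always asks log4j2 for a file named hive-log4j2.xml, resolved from the working directory or the classpath. Customizing its logging therefore means adjusting that file rather than adding a -D flag; a sketch with illustrative paths, assuming the Hive conf directory is on the HPL/SQL classpath:

    cp my-log4j2.xml $HIVE_CONF_DIR/hive-log4j2.xml
    hplsql -e "SELECT 1"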

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
new file mode 100644
index 0000000..93c16de
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
@@ -0,0 +1,136 @@
+package org.apache.hive.service.cli.operation;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.FetchType;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests to verify operation logging layout for different modes.
+ */
+public class TestOperationLoggingLayout {
+  protected static HiveConf hiveConf;
+  protected static String tableName;
+  private File dataFile;
+  protected CLIServiceClient client;
+  protected static MiniHS2 miniHS2 = null;
+  protected static Map<String, String> confOverlay;
+  protected SessionHandle sessionHandle;
+  protected final String sql = "select * from " + tableName;
+  private final String sqlCntStar = "select count(*) from " + tableName;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    tableName = "TestOperationLoggingLayout_table";
+    hiveConf = new HiveConf();
+    hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution");
+    // We need to set the below parameter to test performance level logging
+    hiveConf.set("hive.ql.log.PerfLogger.level", "INFO,DRFA");
+    miniHS2 = new MiniHS2(hiveConf);
+    confOverlay = new HashMap<String, String>();
+    confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    miniHS2.start(confOverlay);
+  }
+
+  /**
+   * Open a session and create a table for the test cases to use.
+   *
+   * @throws Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+    dataFile = new File(hiveConf.get("test.data.files"), "kv1.txt");
+    client = miniHS2.getServiceClient();
+    sessionHandle = setupSession();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    // Cleanup
+    String queryString = "DROP TABLE " + tableName;
+    client.executeStatement(sessionHandle, queryString, null);
+
+    client.closeSession(sessionHandle);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    miniHS2.stop();
+  }
+
+  @Test
+  public void testSwitchLogLayout() throws Exception {
+    // verify whether the sql operation log is generated and fetch correctly.
+    OperationHandle operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null);
+    RowSet rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 1000,
+        FetchType.LOG);
+    Iterator<Object[]> iter = rowSetLog.iterator();
+    // non-verbose pattern is "%-5p : %m%n", so each line should start with the log level
+    while (iter.hasNext()) {
+      String row = iter.next()[0].toString();
+      Assert.assertEquals(true, row.matches("^(FATAL|ERROR|WARN|INFO|DEBUG|TRACE).*$"));
+    }
+
+    String queryString = "set hive.server2.logging.operation.level=verbose";
+    client.executeStatement(sessionHandle, queryString, null);
+    operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null);
+    // just check for first few lines, some log lines are multi-line strings which can break format
+    // checks below
+    rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 10,
+        FetchType.LOG);
+    iter = rowSetLog.iterator();
+    // verbose pattern is "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"
+    while (iter.hasNext()) {
+      String row = iter.next()[0].toString();
+      // just check if the log line starts with date
+      Assert.assertEquals(true,
+          row.matches("^\\d{2}[/](0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01]).*$"));
+    }
+  }
+
+  private SessionHandle setupSession() throws Exception {
+    // Open a session
+    SessionHandle sessionHandle = client.openSession(null, null, null);
+
+    // Change lock manager to embedded mode
+    String queryString = "SET hive.lock.manager=" +
+        "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
+    client.executeStatement(sessionHandle, queryString, null);
+
+    // Drop the table if it exists
+    queryString = "DROP TABLE IF EXISTS " + tableName;
+    client.executeStatement(sessionHandle, queryString, null);
+
+    // Create a test table
+    queryString = "create table " + tableName + " (key int, value string)";
+    client.executeStatement(sessionHandle, queryString, null);
+
+    // Load data
+    queryString = "load data local inpath '" + dataFile + "' into table " + tableName;
+    client.executeStatement(sessionHandle, queryString, null);
+
+    // Precondition check: verify whether the table is created and data is fetched correctly.
+    OperationHandle operationHandle = client.executeStatement(sessionHandle, sql, null);
+    RowSet rowSetResult = client.fetchResults(operationHandle);
+    Assert.assertEquals(500, rowSetResult.numRows());
+    Assert.assertEquals(238, rowSetResult.iterator().next()[0]);
+    Assert.assertEquals("val_238", rowSetResult.iterator().next()[1]);
+
+    return sessionHandle;
+  }
+}
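
The new test lives in the hive-unit module under itests, so once the itests tree has been built it can be run on its own through surefire; a sketch (the module path is taken from the file location above):

    cd itests/hive-unit
    mvn test -Dtest=TestOperationLoggingLayout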