Posted to commits@hive.apache.org by th...@apache.org on 2014/11/08 19:36:22 UTC

svn commit: r1637584 - in /hive/branches/branch-0.14: common/src/java/org/apache/hadoop/hive/conf/ itests/hive-unit/src/test/java/org/apache/hive/beeline/ itests/hive-unit/src/test/java/org/apache/hive/jdbc/ itests/hive-unit/src/test/java/org/apache/hi...

Author: thejas
Date: Sat Nov  8 18:36:21 2014
New Revision: 1637584

URL: http://svn.apache.org/r1637584
Log:
HIVE-8785 : HiveServer2 LogDivertAppender should be more selective for beeline getLogs (Thejas Nair, reviewed by Gopal V)

Modified:
    hive/branches/branch-0.14/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
    hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
    hive/branches/branch-0.14/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
    hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
    hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
    hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
    hive/branches/branch-0.14/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java

Modified: hive/branches/branch-0.14/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/branch-0.14/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Sat Nov  8 18:36:21 2014
@@ -1527,12 +1527,13 @@ public class HiveConf extends Configurat
 
     // operation log configuration
     HIVE_SERVER2_LOGGING_OPERATION_ENABLED("hive.server2.logging.operation.enabled", true,
-        "When true, HS2 will save operation logs"),
+        "When true, HS2 will save operation logs and make them available for clients"),
     HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION("hive.server2.logging.operation.log.location",
         "${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + File.separator +
             "operation_logs",
         "Top level directory where operation logs are stored if logging functionality is enabled"),
-
+    HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false,
+            "When true, HS2 operation logs available for clients will be verbose"),
     // logging configuration
     HIVE_LOG4J_FILE("hive.log4j.file", "",
         "Hive log4j configuration file.\n" +

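For context, the new flag is read like any other HiveConf boolean. A minimal sketch of how server-side code can consult it (the class name below is illustrative and not part of this commit):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class VerboseLogFlagCheck {
      public static void main(String[] args) {
        // Picks up hive-site.xml / system property overrides, including the
        // hive.server2.logging.operation.verbose flag introduced in this commit.
        HiveConf conf = new HiveConf();
        boolean verbose =
            conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE);
        System.out.println("HS2 operation logs verbose: " + verbose);
      }
    }
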
Modified: hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Sat Nov  8 18:36:21 2014
@@ -529,7 +529,7 @@ public class TestBeeLineWithArgs {
   public void testQueryProgress() throws Throwable {
     final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
         "select count(*) from " + tableName + ";\n";
-    final String EXPECTED_PATTERN = "Parsing command";
+    final String EXPECTED_PATTERN = "number of splits";
     testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
   }
 

Modified: hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Sat Nov  8 18:36:21 2014
@@ -50,6 +50,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
@@ -104,6 +105,7 @@ public class TestJdbcDriver2 {
   public static void setUpBeforeClass() throws SQLException, ClassNotFoundException{
     Class.forName(driverName);
     Connection con1 = getConnection("default");
+    System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, "" + true);
 
     Statement stmt1 = con1.createStatement();
     assertNotNull("Statement is null", stmt1);

Modified: hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java (original)
+++ hive/branches/branch-0.14/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java Sat Nov  8 18:36:21 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hive.service.cli;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
@@ -35,6 +36,7 @@ public class TestEmbeddedThriftBinaryCLI
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     service = new EmbeddedThriftBinaryCLIService();
+    service.init(new HiveConf());
     client = new ThriftCLIServiceClient(service);
   }
 

Modified: hive/branches/branch-0.14/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/branch-0.14/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Sat Nov  8 18:36:21 2014
@@ -52,6 +52,7 @@ import javax.security.sasl.SaslException
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthFactory;
@@ -143,7 +144,9 @@ public class HiveConnection implements j
     isEmbeddedMode = connParams.isEmbeddedMode();
 
     if (isEmbeddedMode) {
-      client = new EmbeddedThriftBinaryCLIService();
+      EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
+      embeddedClient.init(new HiveConf());
+      client = embeddedClient;
     } else {
       // extract user/password from JDBC connection properties if its not supplied in the
       // connection URL

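The branch changed above is the one exercised by a JDBC URL with no host/port; with this change the embedded CLI service is explicitly initialized with a fresh HiveConf before the client is handed out. A rough usage sketch (values illustrative):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class EmbeddedJdbcSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // "jdbc:hive2://" with no host/port selects embedded mode in HiveConnection.
        Connection con = DriverManager.getConnection("jdbc:hive2://", "", "");
        Statement stmt = con.createStatement();
        stmt.execute("show tables");
        con.close();
      }
    }
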
Modified: hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (original)
+++ hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java Sat Nov  8 18:36:21 2014
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.Context;
@@ -170,7 +171,8 @@ public class TezTask extends Task<TezWor
       counters = client.getDAGStatus(statusGetOpts).getDAGCounters();
       TezSessionPoolManager.getInstance().returnSession(session);
 
-      if (LOG.isInfoEnabled() && counters != null) {
+      if (LOG.isInfoEnabled() && counters != null
+          && conf.getBoolVar(conf, HiveConf.ConfVars.TEZ_EXEC_SUMMARY)) {
         for (CounterGroup group: counters) {
           LOG.info(group.getDisplayName() +":");
           for (TezCounter counter: group) {

Modified: hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (original)
+++ hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java Sat Nov  8 18:36:21 2014
@@ -18,13 +18,17 @@
 
 package org.apache.hive.service.cli.operation;
 import java.io.CharArrayWriter;
+import java.util.regex.Pattern;
 
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
 import org.apache.log4j.WriterAppender;
 import org.apache.log4j.spi.Filter;
 import org.apache.log4j.spi.LoggingEvent;
 
+import com.google.common.base.Joiner;
+
 /**
  * An Appender to divert logs from individual threads to the LogObject they belong to.
  */
@@ -33,20 +37,29 @@ public class LogDivertAppender extends W
   private final OperationManager operationManager;
 
   /**
-   * A log filter that exclude messages coming from the logger with the given name.
-   * We apply this filter on the Loggers used by the log diversion stuff, so that
+   * A log filter that filters messages coming from the logger with the given names.
+   * It be used as a white list filter or a black list filter.
+   * We apply black list filter on the Loggers used by the log diversion stuff, so that
    * they don't generate more logs for themselves when they process logs.
+   * White list filter is used for less verbose log collection
    */
-  private static class NameExclusionFilter extends Filter {
-    private String excludeLoggerName = null;
-
-    public NameExclusionFilter(String excludeLoggerName) {
-      this.excludeLoggerName = excludeLoggerName;
+  private static class NameFilter extends Filter {
+    private final Pattern namePattern;
+    private final boolean excludeMatches;
+
+    public NameFilter(boolean isExclusionFilter, String [] loggerNames) {
+      this.excludeMatches = isExclusionFilter;
+      String matchRegex = Joiner.on("|").join(loggerNames);
+      this.namePattern = Pattern.compile(matchRegex);
     }
 
     @Override
     public int decide(LoggingEvent ev) {
-      if (ev.getLoggerName().equals(excludeLoggerName)) {
+      boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
+      if (excludeMatches == isMatch) {
+        // Deny if this is black-list filter (excludeMatches = true) and it
+        // matched
+        // or if this is whitelist filter and it didn't match
         return Filter.DENY;
       }
       return Filter.NEUTRAL;
@@ -56,21 +69,29 @@ public class LogDivertAppender extends W
   /** This is where the log message will go to */
   private final CharArrayWriter writer = new CharArrayWriter();
 
-  public LogDivertAppender(Layout layout, OperationManager operationManager) {
+  public LogDivertAppender(Layout layout, OperationManager operationManager, boolean isVerbose) {
     setLayout(layout);
     setWriter(writer);
     setName("LogDivertAppender");
     this.operationManager = operationManager;
 
-    // Filter out messages coming from log processing classes, or we'll run an infinite loop.
-    addFilter(new NameExclusionFilter(LOG.getName()));
-    addFilter(new NameExclusionFilter(OperationLog.class.getName()));
-    addFilter(new NameExclusionFilter(OperationManager.class.getName()));
+    if (isVerbose) {
+      // Filter out messages coming from log processing classes, or we'll run an
+      // infinite loop.
+      String[] exclLoggerNames = { LOG.getName(), OperationLog.class.getName(),
+          OperationManager.class.getName() };
+      addFilter(new NameFilter(true, exclLoggerNames));
+    } else {
+      // in non verbose mode, show only select logger messages
+      String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter",
+          "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName() };
+      addFilter(new NameFilter(false, inclLoggerNames));
+    }
   }
 
   /**
-   * Overrides WriterAppender.subAppend(), which does the real logging.
-   * No need to worry about concurrency since log4j calls this synchronously.
+   * Overrides WriterAppender.subAppend(), which does the real logging. No need
+   * to worry about concurrency since log4j calls this synchronously.
    */
   @Override
   protected void subAppend(LoggingEvent event) {

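The core of the change is the regex-based white/black-list name filter. A self-contained sketch of the same idea against the log4j 1.x Filter API (hypothetical class name, not the committed code):

    import java.util.regex.Pattern;

    import org.apache.log4j.spi.Filter;
    import org.apache.log4j.spi.LoggingEvent;

    import com.google.common.base.Joiner;

    public class RegexNameFilter extends Filter {
      private final Pattern namePattern;
      private final boolean excludeMatches;

      public RegexNameFilter(boolean excludeMatches, String... loggerNames) {
        this.excludeMatches = excludeMatches;
        // Build a single alternation regex over the logger names, e.g. "a|b|c".
        this.namePattern = Pattern.compile(Joiner.on("|").join(loggerNames));
      }

      @Override
      public int decide(LoggingEvent event) {
        boolean isMatch = namePattern.matcher(event.getLoggerName()).matches();
        // Black list (excludeMatches = true): deny on match.
        // White list (excludeMatches = false): deny on non-match.
        return (excludeMatches == isMatch) ? Filter.DENY : Filter.NEUTRAL;
      }
    }
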
Modified: hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original)
+++ hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Sat Nov  8 18:36:21 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hive.service.cli.operation;
 
-import java.util.Enumeration;
 import java.util.ArrayList;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -30,16 +30,26 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hive.service.AbstractService;
-import org.apache.hive.service.cli.*;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationStatus;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
+import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
-import org.apache.log4j.*;
+import org.apache.log4j.Appender;
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
 
 /**
  * OperationManager.
  *
  */
 public class OperationManager extends AbstractService {
-  private static final String DEFAULT_LAYOUT_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
   private final Log LOG = LogFactory.getLog(OperationManager.class.getName());
 
   private HiveConf hiveConf;
@@ -54,7 +64,8 @@ public class OperationManager extends Ab
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
-      initOperationLogCapture();
+      boolean isVerbose = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE);
+      initOperationLogCapture(isVerbose);
     } else {
       LOG.debug("Operation level logging is turned off");
     }
@@ -73,7 +84,7 @@ public class OperationManager extends Ab
     super.stop();
   }
 
-  private void initOperationLogCapture() {
+  private void initOperationLogCapture(boolean isVerbose) {
     // There should be a ConsoleAppender. Copy its Layout.
     Logger root = Logger.getRootLogger();
     Layout layout = null;
@@ -87,13 +98,19 @@ public class OperationManager extends Ab
       }
     }
 
-    if (layout == null) {
-      layout = new PatternLayout(DEFAULT_LAYOUT_PATTERN);
-      LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
-    }
+    final String VERBOSE_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
+    final String NONVERBOSE_PATTERN = "%-5p : %m%n";
 
+    if (isVerbose) {
+      if (layout == null) {
+        layout = new PatternLayout(VERBOSE_PATTERN);
+        LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
+      }
+    } else {
+      layout = new PatternLayout(NONVERBOSE_PATTERN);
+    }
     // Register another Appender (with the same layout) that talks to us.
-    Appender ap = new LogDivertAppender(layout, this);
+    Appender ap = new LogDivertAppender(layout, this, isVerbose);
     root.addAppender(ap);
   }
 

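A sketch of how the chosen layout ends up on an appender attached to the root logger, mirroring initOperationLogCapture above but with a plain WriterAppender (illustrative only, not the committed code):

    import java.io.CharArrayWriter;

    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;
    import org.apache.log4j.WriterAppender;

    public class AttachDivertAppenderSketch {
      public static void main(String[] args) {
        boolean isVerbose = false;
        // Verbose mode keeps the timestamp/level/logger prefix; non-verbose trims it down.
        String pattern = isVerbose ? "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n" : "%-5p : %m%n";
        CharArrayWriter writer = new CharArrayWriter();
        WriterAppender appender = new WriterAppender(new PatternLayout(pattern), writer);
        appender.setName("ExampleDivertAppender");
        Logger.getRootLogger().addAppender(appender);

        Logger.getRootLogger().info("hello");
        System.out.println(writer.toString());   // "INFO  : hello" with the non-verbose pattern
      }
    }
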
Modified: hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java (original)
+++ hive/branches/branch-0.14/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java Sat Nov  8 18:36:21 2014
@@ -33,8 +33,13 @@ public class EmbeddedThriftBinaryCLIServ
     super(new CLIService(null));
     isEmbedded = true;
     HiveConf.setLoadHiveServer2Config(true);
-    cliService.init(new HiveConf());
+  }
+
+  @Override
+  public synchronized void init(HiveConf hiveConf) {
+    cliService.init(hiveConf);
     cliService.start();
+    super.init(hiveConf);
   }
 
   public ICLIService getService() {

Modified: hive/branches/branch-0.14/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.14/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java?rev=1637584&r1=1637583&r2=1637584&view=diff
==============================================================================
--- hive/branches/branch-0.14/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java (original)
+++ hive/branches/branch-0.14/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java Sat Nov  8 18:36:21 2014
@@ -17,29 +17,38 @@
  */
 package org.apache.hive.service.cli.operation;
 
-import org.junit.Assert;
+import java.io.File;
+
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.cli.*;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.FetchType;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationStatus;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.File;
-
 /**
  * TestOperationLoggingAPI
  * Test the FetchResults of TFetchType.LOG in thrift level.
  */
 public class TestOperationLoggingAPI {
-  private HiveConf hiveConf = new HiveConf();
-  private String tableName = "testOperationLoggingAPI_table";
+  private static HiveConf hiveConf;
+  private final String tableName = "testOperationLoggingAPI_table";
   private File dataFile;
   private ThriftCLIServiceClient client;
   private SessionHandle sessionHandle;
-  private String sql = "select * from " + tableName;
-  private String[] expectedLogs = {
+  private final String sql = "select * from " + tableName;
+  private final String[] expectedLogs = {
     "Parsing command",
     "Parse Completed",
     "Starting Semantic Analysis",
@@ -47,6 +56,12 @@ public class TestOperationLoggingAPI {
     "Starting command"
   };
 
+  @BeforeClass
+  public static void setUpBeforeClass() {
+    hiveConf = new HiveConf();
+    hiveConf.setBoolean(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, true);
+  }
+
   /**
    * Start embedded mode, open a session, and create a table for cases usage
    * @throws Exception
@@ -247,7 +262,7 @@ public class TestOperationLoggingAPI {
 
   private void verifyFetchedLog(String logs) {
     for (String log : expectedLogs) {
-      Assert.assertTrue(logs.contains(log));
+      Assert.assertTrue("Checking for presence of " + log, logs.contains(log));
     }
   }
 }
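
For reference, a condensed sketch of the client-side call path this test exercises: fetching the operation log (FetchType.LOG) instead of the result set. Session and statement details are illustrative:

    import java.util.HashMap;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.cli.FetchOrientation;
    import org.apache.hive.service.cli.FetchType;
    import org.apache.hive.service.cli.OperationHandle;
    import org.apache.hive.service.cli.RowSet;
    import org.apache.hive.service.cli.SessionHandle;
    import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
    import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;

    public class FetchOperationLogSketch {
      public static void main(String[] args) throws Exception {
        EmbeddedThriftBinaryCLIService service = new EmbeddedThriftBinaryCLIService();
        service.init(new HiveConf());                 // init is now the caller's responsibility
        ThriftCLIServiceClient client = new ThriftCLIServiceClient(service);

        SessionHandle session = client.openSession("user", "");
        OperationHandle op = client.executeStatement(session, "show tables",
            new HashMap<String, String>());

        // FetchType.LOG returns the diverted operation log rather than query results.
        RowSet logRows = client.fetchResults(op, FetchOrientation.FETCH_FIRST, 1000, FetchType.LOG);
        for (Object[] row : logRows) {
          System.out.println(row[0]);
        }
        client.closeSession(session);
      }
    }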