You are viewing a plain text version of this content; the canonical (HTML) version, whose hyperlink was lost in this text extraction, is available in the Apache mailing-list archive.
Posted to commits@hive.apache.org by br...@apache.org on 2014/10/29 00:08:34 UTC
svn commit: r1635003 - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
ql/src/java/org/apache/hadoop/hive/ql/Driver.java
Author: brock
Date: Tue Oct 28 23:08:34 2014
New Revision: 1635003
URL: http://svn.apache.org/r1635003
Log:
HIVE-8600 - Add option to log explain output for query (Mohit Sabharwal via Brock)
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1635003&r1=1635002&r2=1635003&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Oct 28 23:08:34 2014
@@ -1471,6 +1471,10 @@ public class HiveConf extends Configurat
"If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
"which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
+ HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
+ "Whether to log explain output for every query.\n" +
+ "When enabled, will log EXPLAIN EXTENDED output for the query at INFO log4j log level."),
+
// prefix used to auto generated column aliases (this should be started with '_')
HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c",
"String used as a prefix when auto generating column alias.\n" +
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1635003&r1=1635002&r2=1635003&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue Oct 28 23:08:34 2014
@@ -18,8 +18,10 @@
package org.apache.hadoop.hive.ql;
+import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.IOException;
+import java.io.PrintStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
@@ -43,6 +45,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.ql.exec.ConditionalTask;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.MoveTask;
import org.apache.hadoop.hive.ql.exec.Operator;
@@ -69,7 +72,6 @@ import org.apache.hadoop.hive.ql.lockmgr
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
-import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
@@ -467,6 +469,13 @@ public class Driver implements CommandPr
}
}
+ if (conf.getBoolVar(ConfVars.HIVE_LOG_EXPLAIN_OUTPUT)) {
+ String explainOutput = getExplainOutput(sem, plan, tree.dump());
+ if (explainOutput != null) {
+ LOG.info("EXPLAIN output: " + explainOutput);
+ }
+ }
+
return 0;
} catch (Exception e) {
ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
@@ -494,6 +503,33 @@ public class Driver implements CommandPr
}
/**
+ * Returns EXPLAIN EXTENDED output for a semantically
+ * analyzed query.
+ *
+ * @param sem semantic analyzer for analyzed query
+ * @param plan query plan
+ * @param astStringTree AST tree dump
+ * @throws java.io.IOException
+ */
+ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
+ String astStringTree) throws IOException {
+ String ret = null;
+ ExplainTask task = new ExplainTask();
+ task.initialize(conf, plan, null);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ PrintStream ps = new PrintStream(baos);
+ try {
+ task.getJSONPlan(ps, astStringTree, sem.getRootTasks(), sem.getFetchTask(),
+ false, true, true);
+ ret = baos.toString();
+ } catch (Exception e) {
+ LOG.warn("Exception generating explain output: " + e, e);
+ }
+
+ return ret;
+ }
+
+ /**
* Do authorization using post semantic analysis information in the semantic analyzer
* The original command is also passed so that authorization interface can provide
* more useful information in logs.