You are viewing a plain-text version of this content; the canonical link is the commit URL given below (http://svn.apache.org/r1617653).
Posted to commits@hive.apache.org by br...@apache.org on 2014/08/13 04:50:13 UTC
svn commit: r1617653 -
/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
Author: brock
Date: Wed Aug 13 02:50:13 2014
New Revision: 1617653
URL: http://svn.apache.org/r1617653
Log:
HIVE-7607 - Spark "Explain" should give useful information on dependencies (Chao via Brock) [Spark Branch]
Modified:
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1617653&r1=1617652&r2=1617653&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Aug 13 02:50:13 2014
@@ -47,11 +47,7 @@ import org.apache.hadoop.hive.ql.hooks.R
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.OperatorDesc;
-import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.apache.hadoop.hive.ql.plan.*;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -426,6 +422,33 @@ public class ExplainTask extends Task<Ex
json.accumulate(ent.getKey().toString(), jsonDep);
}
}
+ } else if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty()
+ && ((List<?>)ent.getValue()).get(0) != null &&
+ ((List<?>)ent.getValue()).get(0) instanceof SparkWork.Dependency) {
+ if (out != null) {
+ boolean isFirst = true;
+ for (SparkWork.Dependency dep: (List<SparkWork.Dependency>)ent.getValue()) {
+ if (!isFirst) {
+ out.print(", ");
+ } else {
+ out.print("<- ");
+ isFirst = false;
+ }
+ out.print(dep.getName());
+ out.print(" (");
+ out.print(dep.getShuffleType());
+ out.print(")");
+ }
+ out.println();
+ }
+ if (jsonOutput) {
+ for (SparkWork.Dependency dep: (List<SparkWork.Dependency>)ent.getValue()) {
+ JSONObject jsonDep = new JSONObject();
+ jsonDep.put("parent", dep.getName());
+ jsonDep.put("type", dep.getShuffleType());
+ json.accumulate(ent.getKey().toString(), jsonDep);
+ }
+ }
} else {
if (out != null) {
out.print(ent.getValue().toString());