You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2012/11/22 10:17:08 UTC
svn commit: r1412465 - in /hive/trunk/ql/src:
java/org/apache/hadoop/hive/ql/exec/ test/queries/clientpositive/
test/results/clientnegative/ test/results/clientpositive/
Author: namit
Date: Thu Nov 22 09:17:07 2012
New Revision: 1412465
URL: http://svn.apache.org/viewvc?rev=1412465&view=rev
Log:
HIVE-3703 Hive Query Explain Plan JSON not being created properly
(Mayank Garg via namit)
Added:
hive/trunk/ql/src/test/queries/clientpositive/plan_json.q
hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
hive/trunk/ql/src/test/results/clientnegative/alter_view_failure5.q.out
hive/trunk/ql/src/test/results/clientnegative/alter_view_failure7.q.out
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1412465&r1=1412464&r2=1412465&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Thu Nov 22 09:17:07 2012
@@ -42,13 +42,14 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.StringUtils;
+import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -251,12 +252,13 @@ public class ExplainTask extends Task<Ex
return jsonOutput ? json : null;
}
- private static String outputList(List<?> l, String header, PrintStream out,
+ private static JSONArray outputList(List<?> l, String header, PrintStream out,
boolean extended, boolean jsonOutput, int indent) throws Exception {
boolean first_el = true;
boolean nl = false;
- StringBuffer s = new StringBuffer();
+ JSONArray outputArray = new JSONArray();
+
for (Object o : l) {
if (first_el && (out != null)) {
out.print(header);
@@ -270,8 +272,7 @@ public class ExplainTask extends Task<Ex
}
if (jsonOutput) {
- s.append(delim);
- s.append(o);
+ outputArray.put(o);
}
nl = true;
}
@@ -282,10 +283,7 @@ public class ExplainTask extends Task<Ex
JSONObject jsonOut = outputPlan((Serializable) o, out, extended,
jsonOutput, jsonOutput ? 0 : indent + 2);
if (jsonOutput) {
- if (!first_el) {
- s.append(", ");
- }
- s.append(jsonOut);
+ outputArray.put(jsonOut);
}
}
@@ -295,7 +293,8 @@ public class ExplainTask extends Task<Ex
if (nl && (out != null)) {
out.println();
}
- return jsonOutput ? s.toString() : null;
+
+ return jsonOutput ? outputArray : null;
}
private static boolean isPrintable(Object val) {
@@ -400,6 +399,7 @@ public class ExplainTask extends Task<Ex
header = indentString(prop_indents);
}
+ // Try to output val as a primitive (printable) object
if (isPrintable(val)) {
if (out != null) {
out.printf("%s ", header);
@@ -428,11 +428,13 @@ public class ExplainTask extends Task<Ex
// Try this as a list
try {
List<?> l = (List<?>) val;
- String jsonOut = outputList(l, header, out, extended, jsonOutput,
+ JSONArray jsonOut = outputList(l, header, out, extended, jsonOutput,
jsonOutput ? 0 : prop_indents + 2);
+
if (jsonOutput) {
json.put(header, jsonOut);
}
+
continue;
}
catch (ClassCastException ce) {
@@ -468,7 +470,6 @@ public class ExplainTask extends Task<Ex
return json;
}
-
return null;
}
Added: hive/trunk/ql/src/test/queries/clientpositive/plan_json.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/plan_json.q?rev=1412465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/plan_json.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/plan_json.q Thu Nov 22 09:17:07 2012
@@ -0,0 +1,3 @@
+-- explain plan json: the query gets the formatted json output of the query plan of the hive query
+
+EXPLAIN FORMATTED SELECT count(1) FROM src;
Modified: hive/trunk/ql/src/test/results/clientnegative/alter_view_failure5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure5.q.out?rev=1412465&r1=1412464&r2=1412465&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_view_failure5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_view_failure5.q.out Thu Nov 22 09:17:07 2012
@@ -15,4 +15,4 @@ SELECT * FROM src
POSTHOOK: type: CREATEVIEW
POSTHOOK: Output: default@xxx6
#### A masked pattern was here ####
-FAILED: SemanticException value not found in table's partition spec: {v=val_86}
+FAILED: SemanticException [Error 10214]: Invalid partition spec specified value not found in table's partition spec: {v=val_86}
Modified: hive/trunk/ql/src/test/results/clientnegative/alter_view_failure7.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure7.q.out?rev=1412465&r1=1412464&r2=1412465&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_view_failure7.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_view_failure7.q.out Thu Nov 22 09:17:07 2012
@@ -15,4 +15,4 @@ SELECT key,ds,hr FROM srcpart
POSTHOOK: type: CREATEVIEW
POSTHOOK: Output: default@xxx8
#### A masked pattern was here ####
-FAILED: SemanticException table is partitioned but partition spec is not specified or does not fully match table partitioning: {ds=2011-01-01}
+FAILED: SemanticException [Error 10214]: Invalid partition spec specified table is partitioned but partition spec is not specified or does not fully match table partitioning: {ds=2011-01-01}
Added: hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out?rev=1412465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out Thu Nov 22 09:17:07 2012
@@ -0,0 +1,9 @@
+PREHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+POSTHOOK: type: QUERY
+{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"GBY_4":{"SEL_5":{"FS_6":{"File Output Operator":{"GlobalTableId:":"0","compressed:":"false","table:":{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}}}}},"Alias -> Map Operator Tree:":{"src":{"TS_0":{"SEL_1":{"GBY_2":{"RS_3":{"Reduce Output Operator":{"Map-reduce partition columns:":[],"sort order:":"","tag:":"-1","value expressions:":[{"type:":"bigint","expr:":"_col0"}],"key expressions:":[]}}}}}}},"Percentage Sample:":{}}},"Stage-0":{"Fetch Operator":{"limit:":"-1"}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"ROOT STAGE":"TRUE"}},"ABSTRACT SYNTAX TREE":"(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))"}