Posted to commits@hive.apache.org by se...@apache.org on 2017/02/22 01:41:45 UTC
hive git commit: HIVE-15938 : position alias in order by fails for union queries (Sergey Shelukhin, reviewed by Ashutosh Chauhan, Pengcheng Xiong)
Repository: hive
Updated Branches:
refs/heads/master ffe735766 -> 78e4bb79a
HIVE-15938 : position alias in order by fails for union queries (Sergey Shelukhin, reviewed by Ashutosh Chauhan, Pengcheng Xiong)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/78e4bb79
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/78e4bb79
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/78e4bb79
Branch: refs/heads/master
Commit: 78e4bb79a2f9e74acb8144db1854e5b9ad369f0f
Parents: ffe7357
Author: Sergey Shelukhin <se...@apache.org>
Authored: Tue Feb 21 17:15:14 2017 -0800
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Tue Feb 21 17:41:44 2017 -0800
----------------------------------------------------------------------
.../java/org/apache/hadoop/hive/ql/Driver.java | 6 +-
.../metadata/HiveMaterializedViewsRegistry.java | 3 +-
.../calcite/translator/ASTBuilder.java | 38 +--
.../index/RewriteParseContextGenerator.java | 4 +-
.../hadoop/hive/ql/parse/CalcitePlanner.java | 51 ++-
.../ql/parse/ColumnStatsAutoGatherContext.java | 4 +-
.../ql/parse/ColumnStatsSemanticAnalyzer.java | 4 +-
.../apache/hadoop/hive/ql/parse/HiveParser.g | 11 +-
.../apache/hadoop/hive/ql/parse/ParseUtils.java | 190 +++++++++++-
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 7 +-
.../ql/parse/UpdateDeleteSemanticAnalyzer.java | 5 +-
.../hadoop/hive/ql/tools/LineageInfo.java | 5 +-
.../ql/parse/TestMacroSemanticAnalyzer.java | 4 +-
.../parse/TestUpdateDeleteSemanticAnalyzer.java | 4 +-
.../authorization/AuthorizationTestUtil.java | 2 +-
.../queries/clientpositive/union_pos_alias.q | 30 ++
.../clientpositive/constant_prop_1.q.out | 4 +-
.../clientpositive/union_pos_alias.q.out | 308 +++++++++++++++++++
18 files changed, 611 insertions(+), 69 deletions(-)
----------------------------------------------------------------------
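The fix has two halves: HiveParser.g now emits a TOK_SETCOLREF placeholder (instead of TOK_ALLCOLREF) in the select list it synthesizes around set operations, and the new ParseUtils.parse() entry point rewrites that placeholder into explicit column references (handleSetColRefs/processSetColsNode) before semantic analysis, so a positional ORDER BY alias over a UNION can resolve to a real column. Call sites that used to pair ParseDriver.parse() with ParseUtils.findRootNonNullToken() now go through the single entry point. A minimal sketch of the new call pattern, assuming a Context (ctx) set up as in Driver.java; the query string is simply the failing shape exercised by the new union_pos_alias.q test:
  // Before this patch, each call site did three steps:
  //   ParseDriver pd = new ParseDriver();
  //   ASTNode tree = pd.parse(command, ctx);
  //   tree = ParseUtils.findRootNonNullToken(tree);
  // After this patch, one call also rewrites TOK_SETCOLREF:
  String command =
      "select key as value, value as key from src_10 "
    + "union all "
    + "select 'test', value from src_10 s3 "
    + "order by 2, 1 desc";
  ASTNode tree = ParseUtils.parse(command, ctx);   // throws ParseException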
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 2423471..592b1f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -118,7 +118,6 @@ import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
@@ -466,9 +465,7 @@ public class Driver implements CommandProcessor {
ctx.setHDFSCleanup(true);
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARSE);
- ParseDriver pd = new ParseDriver();
- ASTNode tree = pd.parse(command, ctx);
- tree = ParseUtils.findRootNonNullToken(tree);
+ ASTNode tree = ParseUtils.parse(command, ctx);
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARSE);
// Trigger query hook before compilation
@@ -646,6 +643,7 @@ public class Driver implements CommandProcessor {
}
}
+
private int handleInterruption(String msg) {
SQLState = "HY008"; //SQLState for cancel operation
errorMessage = "FAILED: command has been interrupted: " + msg;
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
index 89c87cd..1d78b4c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
@@ -328,8 +328,7 @@ public final class HiveMaterializedViewsRegistry {
private static RelNode parseQuery(String viewQuery) {
try {
- final ParseDriver pd = new ParseDriver();
- final ASTNode node = ParseUtils.findRootNonNullToken(pd.parse(viewQuery));
+ final ASTNode node = ParseUtils.parse(viewQuery);
final QueryState qs = new QueryState(SessionState.get().getConf());
CalcitePlanner analyzer = new CalcitePlanner(qs);
analyzer.initCtx(new Context(SessionState.get().getConf()));
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
index e36e1bd..0dc0c24 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
@@ -38,19 +38,19 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-class ASTBuilder {
+public class ASTBuilder {
- static ASTBuilder construct(int tokenType, String text) {
+ public static ASTBuilder construct(int tokenType, String text) {
ASTBuilder b = new ASTBuilder();
b.curr = createAST(tokenType, text);
return b;
}
- static ASTNode createAST(int tokenType, String text) {
+ public static ASTNode createAST(int tokenType, String text) {
return (ASTNode) ParseDriver.adaptor.create(tokenType, text);
}
- static ASTNode destNode() {
+ public static ASTNode destNode() {
return ASTBuilder
.construct(HiveParser.TOK_DESTINATION, "TOK_DESTINATION")
.add(
@@ -58,7 +58,7 @@ class ASTBuilder {
"TOK_TMP_FILE")).node();
}
- static ASTNode table(RelNode scan) {
+ public static ASTNode table(RelNode scan) {
HiveTableScan hts;
if (scan instanceof DruidQuery) {
hts = (HiveTableScan) ((DruidQuery)scan).getTableScan();
@@ -102,7 +102,7 @@ class ASTBuilder {
return b.node();
}
- static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType, ASTNode cond,
+ public static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType, ASTNode cond,
boolean semiJoin) {
ASTBuilder b = null;
@@ -129,12 +129,12 @@ class ASTBuilder {
return b.node();
}
- static ASTNode subQuery(ASTNode qry, String alias) {
+ public static ASTNode subQuery(ASTNode qry, String alias) {
return ASTBuilder.construct(HiveParser.TOK_SUBQUERY, "TOK_SUBQUERY").add(qry)
.add(HiveParser.Identifier, alias).node();
}
- static ASTNode qualifiedName(String tableName, String colName) {
+ public static ASTNode qualifiedName(String tableName, String colName) {
ASTBuilder b = ASTBuilder
.construct(HiveParser.DOT, ".")
.add(
@@ -143,36 +143,36 @@ class ASTBuilder {
return b.node();
}
- static ASTNode unqualifiedName(String colName) {
+ public static ASTNode unqualifiedName(String colName) {
ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL").add(
HiveParser.Identifier, colName);
return b.node();
}
- static ASTNode where(ASTNode cond) {
+ public static ASTNode where(ASTNode cond) {
return ASTBuilder.construct(HiveParser.TOK_WHERE, "TOK_WHERE").add(cond).node();
}
- static ASTNode having(ASTNode cond) {
+ public static ASTNode having(ASTNode cond) {
return ASTBuilder.construct(HiveParser.TOK_HAVING, "TOK_HAVING").add(cond).node();
}
- static ASTNode limit(Object offset, Object limit) {
+ public static ASTNode limit(Object offset, Object limit) {
return ASTBuilder.construct(HiveParser.TOK_LIMIT, "TOK_LIMIT")
.add(HiveParser.Number, offset.toString())
.add(HiveParser.Number, limit.toString()).node();
}
- static ASTNode selectExpr(ASTNode expr, String alias) {
+ public static ASTNode selectExpr(ASTNode expr, String alias) {
return ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR").add(expr)
.add(HiveParser.Identifier, alias).node();
}
- static ASTNode literal(RexLiteral literal) {
+ public static ASTNode literal(RexLiteral literal) {
return literal(literal, false);
}
- static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
+ public static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
Object val = null;
int type = 0;
SqlTypeName sqlType = literal.getType().getSqlTypeName();
@@ -328,21 +328,21 @@ class ASTBuilder {
ASTNode curr;
- ASTNode node() {
+ public ASTNode node() {
return curr;
}
- ASTBuilder add(int tokenType, String text) {
+ public ASTBuilder add(int tokenType, String text) {
ParseDriver.adaptor.addChild(curr, createAST(tokenType, text));
return this;
}
- ASTBuilder add(ASTBuilder b) {
+ public ASTBuilder add(ASTBuilder b) {
ParseDriver.adaptor.addChild(curr, b.curr);
return this;
}
- ASTBuilder add(ASTNode n) {
+ public ASTBuilder add(ASTNode n) {
if (n != null) {
ParseDriver.adaptor.addChild(curr, n);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
index 340d29a..5659a72 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
@@ -62,9 +62,7 @@ public final class RewriteParseContextGenerator {
Operator<? extends OperatorDesc> operatorTree;
try {
Context ctx = new Context(queryState.getConf());
- ParseDriver pd = new ParseDriver();
- ASTNode tree = pd.parse(command, ctx);
- tree = ParseUtils.findRootNonNullToken(tree);
+ ASTNode tree = ParseUtils.parse(command, ctx);
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
assert(sem instanceof SemanticAnalyzer);
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 10f16ca..21bf020 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -973,17 +973,18 @@ public class CalcitePlanner extends SemanticAnalyzer {
return newAst;
}
- /**
- * Performs breadth-first search of the AST for a nested set of tokens. Tokens
- * don't have to be each others' direct children, they can be separated by
- * layers of other tokens. For each token in the list, the first one found is
- * matched and there's no backtracking; thus, if AST has multiple instances of
- * some token, of which only one matches, it is not guaranteed to be found. We
- * use this for simple things. Not thread-safe - reuses searchQueue.
- */
- static class ASTSearcher {
+
+ public static class ASTSearcher {
private final LinkedList<ASTNode> searchQueue = new LinkedList<ASTNode>();
+ /**
+ * Performs breadth-first search of the AST for a nested set of tokens. Tokens
+ * don't have to be each others' direct children, they can be separated by
+ * layers of other tokens. For each token in the list, the first one found is
+ * matched and there's no backtracking; thus, if AST has multiple instances of
+ * some token, of which only one matches, it is not guaranteed to be found. We
+ * use this for simple things. Not thread-safe - reuses searchQueue.
+ */
public ASTNode simpleBreadthFirstSearch(ASTNode ast, int... tokens) {
searchQueue.clear();
searchQueue.add(ast);
@@ -1007,6 +1008,38 @@ public class CalcitePlanner extends SemanticAnalyzer {
}
return null;
}
+
+ public ASTNode depthFirstSearch(ASTNode ast, int token) {
+ searchQueue.clear();
+ searchQueue.add(ast);
+ while (!searchQueue.isEmpty()) {
+ ASTNode next = searchQueue.poll();
+ if (next.getType() == token) return next;
+ for (int j = 0; j < next.getChildCount(); ++j) {
+ searchQueue.add((ASTNode) next.getChild(j));
+ }
+ }
+ return null;
+ }
+
+ public ASTNode simpleBreadthFirstSearchAny(ASTNode ast, int... tokens) {
+ searchQueue.clear();
+ searchQueue.add(ast);
+ while (!searchQueue.isEmpty()) {
+ ASTNode next = searchQueue.poll();
+ for (int i = 0; i < tokens.length; ++i) {
+ if (next.getType() == tokens[i]) return next;
+ }
+ for (int i = 0; i < next.getChildCount(); ++i) {
+ searchQueue.add((ASTNode) next.getChild(i));
+ }
+ }
+ return null;
+ }
+
+ public void reset() {
+ searchQueue.clear();
+ }
}
private static void replaceASTChild(ASTNode child, ASTNode newChild) {
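ASTSearcher is now public and gains depthFirstSearch(ASTNode, int), simpleBreadthFirstSearchAny(ASTNode, int...) and reset(); like the existing search method, the instance reuses its internal queue and is not thread-safe. A small usage sketch (the query and table name are only illustrative), mirroring how ParseUtils.handleSetColRefs drives the searcher further down in this patch:
  CalcitePlanner.ASTSearcher searcher = new CalcitePlanner.ASTSearcher();
  ASTNode tree = ParseUtils.parse("select * from t");   // throws ParseException
  searcher.reset();
  // First TOK_SELECT or TOK_SELECTDI reachable from the root, whichever is found first:
  ASTNode select = searcher.simpleBreadthFirstSearchAny(
      tree, HiveParser.TOK_SELECT, HiveParser.TOK_SELECTDI);
  // Single-token search; returns null when the token does not occur in the tree:
  ASTNode setColRef = searcher.depthFirstSearch(tree, HiveParser.TOK_SETCOLREF);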
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index 80e62c1..3b719af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -132,9 +132,7 @@ public class ColumnStatsAutoGatherContext {
//0. initialization
Context ctx = new Context(conf);
ctx.setExplainConfig(origCtx.getExplainConfig());
- ParseDriver pd = new ParseDriver();
- ASTNode tree = pd.parse(analyzeCommand, ctx);
- tree = ParseUtils.findRootNonNullToken(tree);
+ ASTNode tree = ParseUtils.parse(analyzeCommand, ctx);
//1. get the ColumnStatsSemanticAnalyzer
BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), tree);
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index ff07b42..93b8183 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -294,14 +294,12 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_IO_ERROR.getMsg());
}
ctx.setCmd(rewrittenQuery);
- ParseDriver pd = new ParseDriver();
try {
- rewrittenTree = pd.parse(rewrittenQuery, ctx);
+ rewrittenTree = ParseUtils.parse(rewrittenQuery, ctx);
} catch (ParseException e) {
throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_PARSE_ERROR.getMsg());
}
- rewrittenTree = ParseUtils.findRootNonNullToken(rewrittenTree);
return rewrittenTree;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index eb81393..b4b5bfb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -42,6 +42,7 @@ TOK_SUBQUERY;
TOK_INSERT_INTO;
TOK_DESTINATION;
TOK_ALLCOLREF;
+TOK_SETCOLREF;
TOK_TABLE_OR_COL;
TOK_FUNCTION;
TOK_FUNCTIONDI;
@@ -2442,7 +2443,7 @@ fromStatement
)
^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
- ^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF))
+ ^(TOK_SELECT ^(TOK_SELEXPR TOK_SETCOLREF))
)
)
-> {$fromStatement.tree}
@@ -2526,7 +2527,7 @@ selectStatement
)
^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
- ^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF))
+ ^(TOK_SELECT ^(TOK_SELEXPR TOK_SETCOLREF))
$o? $c? $d? $sort? $l?
)
)
@@ -2545,7 +2546,7 @@ setOpSelectStatement[CommonTree t]
)
^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
- ^(TOK_SELECTDI ^(TOK_SELEXPR TOK_ALLCOLREF))
+ ^(TOK_SELECTDI ^(TOK_SELEXPR TOK_SETCOLREF))
)
)
-> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}?
@@ -2560,7 +2561,7 @@ setOpSelectStatement[CommonTree t]
)
^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
- ^(TOK_SELECTDI ^(TOK_SELEXPR TOK_ALLCOLREF))
+ ^(TOK_SELECTDI ^(TOK_SELEXPR TOK_SETCOLREF))
)
)
-> ^($u {$t} $b)
@@ -2579,7 +2580,7 @@ setOpSelectStatement[CommonTree t]
)
^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
- ^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF))
+ ^(TOK_SELECT ^(TOK_SELEXPR TOK_SETCOLREF))
)
)
-> {$setOpSelectStatement.tree}
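The grammar change only swaps the token inside the wrapper select that the parser synthesizes around set operations; the tree is otherwise unchanged, and the rewrite to real columns happens later in ParseUtils (below). A rough sketch of how the difference can be observed, assuming hypothetical tables t1 and t2 (no metastore is needed at parse time):
  ParseDriver pd = new ParseDriver();
  String q = "select c1 from t1 union all select c1 from t2 order by 1";
  // Raw ANTLR tree: the synthesized outer select now reads
  //   (TOK_SELECT (TOK_SELEXPR TOK_SETCOLREF)) rather than ... TOK_ALLCOLREF.
  ASTNode raw = pd.parse(q);                       // throws ParseException
  System.out.println(raw.toStringTree());
  // ParseUtils.parse() additionally rewrites TOK_SETCOLREF into explicit
  // column references via handleSetColRefs (see ParseUtils.java below).
  ASTNode rewritten = ParseUtils.parse(q);
  System.out.println(rewritten.toStringTree());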
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index 943e6af..473a664 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -18,6 +18,15 @@
package org.apache.hadoop.hive.ql.parse;
+import org.apache.hadoop.hive.ql.Context;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.antlr.runtime.tree.CommonTree;
+import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTBuilder;
+import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
+
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
@@ -48,6 +57,26 @@ import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
*
*/
public final class ParseUtils {
+ /** Parses the Hive query. */
+ private static final Logger LOG = LoggerFactory.getLogger(ParseUtils.class);
+ public static ASTNode parse(String command) throws ParseException {
+ return parse(command, null);
+ }
+
+ /** Parses the Hive query. */
+ public static ASTNode parse(String command, Context ctx) throws ParseException {
+ return parse(command, ctx, true);
+ }
+
+ /** Parses the Hive query. */
+ public static ASTNode parse(
+ String command, Context ctx, boolean setTokenRewriteStream) throws ParseException {
+ ParseDriver pd = new ParseDriver();
+ ASTNode tree = pd.parse(command, ctx, setTokenRewriteStream);
+ tree = findRootNonNullToken(tree);
+ handleSetColRefs(tree);
+ return tree;
+ }
/**
* Tests whether the parse tree node is a join token.
@@ -77,7 +106,7 @@ public final class ParseUtils {
*
* @return node at which descent stopped
*/
- public static ASTNode findRootNonNullToken(ASTNode tree) {
+ private static ASTNode findRootNonNullToken(ASTNode tree) {
while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
tree = (ASTNode) tree.getChild(0);
}
@@ -311,4 +340,163 @@ public final class ParseUtils {
return stack.empty() && otherStack.empty();
}
+
+
+ private static void handleSetColRefs(ASTNode tree) {
+ CalcitePlanner.ASTSearcher astSearcher = new CalcitePlanner.ASTSearcher();
+ while (true) {
+ astSearcher.reset();
+ ASTNode setCols = astSearcher.depthFirstSearch(tree, HiveParser.TOK_SETCOLREF);
+ if (setCols == null) break;
+ processSetColsNode(setCols, astSearcher);
+ }
+ }
+
+ /**
+ * Replaces a spurious TOK_SETCOLREF added by parser with column names referring to the query
+ * in e.g. a union. This is to maintain the expectations that some code, like order by position
+ * alias, might have about not having ALLCOLREF. If it cannot find the columns with confidence
+ * it will just replace SETCOLREF with ALLCOLREF. Most of the cases where that happens are
+ * easy to work around in the query (e.g. by adding column aliases in the union).
+ * @param setCols TOK_SETCOLREF ASTNode.
+ * @param searcher AST searcher to reuse.
+ */
+ private static void processSetColsNode(ASTNode setCols, ASTSearcher searcher) {
+ searcher.reset();
+ CommonTree rootNode = setCols;
+ while (rootNode != null && rootNode.getType() != HiveParser.TOK_INSERT) {
+ rootNode = rootNode.parent;
+ }
+ if (rootNode == null || rootNode.parent == null) {
+ // Couldn't find the parent insert; replace with ALLCOLREF.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the root INSERT");
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ rootNode = rootNode.parent; // TOK_QUERY above insert
+ Tree fromNode = null;
+ for (int j = 0; j < rootNode.getChildCount(); ++j) {
+ Tree child = rootNode.getChild(j);
+ if (child.getType() == HiveParser.TOK_FROM) {
+ fromNode = child;
+ break;
+ }
+ }
+ if (!(fromNode instanceof ASTNode)) {
+ // Couldn't find the from that contains subquery; replace with ALLCOLREF.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the FROM");
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ // We are making what we are trying to do more explicit if there's a union alias; so
+ // that if we do something we didn't expect to do, it'd be more likely to fail.
+ String alias = null;
+ if (fromNode.getChildCount() > 0) {
+ Tree fromWhat = fromNode.getChild(0);
+ if (fromWhat.getType() == HiveParser.TOK_SUBQUERY && fromWhat.getChildCount() > 1) {
+ Tree child = fromWhat.getChild(fromWhat.getChildCount() - 1);
+ if (child.getType() == HiveParser.Identifier) {
+ alias = child.getText();
+ }
+ }
+ }
+ // We find the SELECT closest to the top. This assumes there's only one FROM or FROM-s
+ // are all equivalent (union case). Also, this assumption could be false for an already
+ // malformed query; we don't check for that here - it will fail later anyway.
+ // TODO: Maybe we should find ALL the SELECT-s not nested in another from, and compare.
+ ASTNode select = searcher.simpleBreadthFirstSearchAny((ASTNode)fromNode,
+ HiveParser.TOK_SELECT, HiveParser.TOK_SELECTDI);
+ if (select == null) {
+ // Couldn't find the from that contains subquery; replace with ALLCOLREF.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the SELECT");
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ // Found the proper columns.
+ List<ASTNode> newChildren = new ArrayList<>(select.getChildCount());
+ HashSet<String> aliases = new HashSet<>();
+ for (int i = 0; i < select.getChildCount(); ++i) {
+ Tree selExpr = select.getChild(i);
+ assert selExpr.getType() == HiveParser.TOK_SELEXPR;
+ assert selExpr.getChildCount() > 0;
+ // Examine the last child. It could be an alias.
+ Tree child = selExpr.getChild(selExpr.getChildCount() - 1);
+ switch (child.getType()) {
+ case HiveParser.TOK_SETCOLREF:
+ // We have a nested setcolref. Process that and start from scratch TODO: use stack?
+ processSetColsNode((ASTNode)child, searcher);
+ processSetColsNode(setCols, searcher);
+ return;
+ case HiveParser.TOK_ALLCOLREF:
+ // We should find an alias of this insert and do (alias).*. This however won't fix e.g.
+ // positional order by alias case, cause we'd still have a star on the top level. Bail.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because of nested ALLCOLREF");
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ case HiveParser.TOK_TABLE_OR_COL:
+ Tree idChild = child.getChild(0);
+ assert idChild.getType() == HiveParser.Identifier : idChild;
+ if (!createChildColumnRef(idChild, alias, newChildren, aliases)) {
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ break;
+ case HiveParser.Identifier:
+ if (!createChildColumnRef(child, alias, newChildren, aliases)) {
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ break;
+ case HiveParser.DOT: {
+ Tree colChild = child.getChild(child.getChildCount() - 1);
+ assert colChild.getType() == HiveParser.Identifier : colChild;
+ if (!createChildColumnRef(colChild, alias, newChildren, aliases)) {
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ break;
+ }
+ default:
+ // Not really sure how to refer to this (or if we can).
+ // TODO: We could find a different from branch for the union, that might have an alias?
+ // Or we could add an alias here to refer to, but that might break other branches.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because of the nested node "
+ + child.getType() + " " + child.getText());
+ setCols.token.setType(HiveParser.TOK_ALLCOLREF);
+ return;
+ }
+ }
+ // Insert search in the beginning would have failed if these parents didn't exist.
+ ASTNode parent = (ASTNode)setCols.parent.parent;
+ int t = parent.getType();
+ assert t == HiveParser.TOK_SELECT || t == HiveParser.TOK_SELECTDI : t;
+ int ix = setCols.parent.childIndex;
+ parent.deleteChild(ix);
+ for (ASTNode node : newChildren) {
+ parent.insertChild(ix++, node);
+ }
+ }
+
+ private static boolean createChildColumnRef(Tree child, String alias,
+ List<ASTNode> newChildren, HashSet<String> aliases) {
+ String colAlias = child.getText();
+ if (!aliases.add(colAlias)) {
+ // TODO: if a side of the union has 2 columns with the same name, noone on the higher
+ // level can refer to them. We could change the alias in the original node.
+ LOG.debug("Replacing SETCOLREF with ALLCOLREF because of duplicate alias " + colAlias);
+ return false;
+ }
+ ASTBuilder selExpr = ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
+ ASTBuilder toc = ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL");
+ ASTBuilder id = ASTBuilder.construct(HiveParser.Identifier, colAlias);
+ if (alias == null) {
+ selExpr = selExpr.add(toc.add(id));
+ } else {
+ ASTBuilder dot = ASTBuilder.construct(HiveParser.DOT, ".");
+ ASTBuilder aliasNode = ASTBuilder.construct(HiveParser.Identifier, alias);
+ selExpr = selExpr.add(dot.add(toc.add(aliasNode)).add(id));
+ }
+ newChildren.add(selExpr.node());
+ return true;
+ }
}
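For each TOK_SELEXPR it can resolve, processSetColsNode replaces the TOK_SETCOLREF with a column reference, qualified by the union subquery's alias when one is present (createChildColumnRef above). A hedged sketch of the node shapes it builds with the now-public ASTBuilder; "s4" and "col" are illustrative names, not part of the patch:
  // With a subquery alias (e.g. "... ) s4"): TOK_SELEXPR over s4.col
  ASTNode qualified = ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR")
      .add(ASTBuilder.construct(HiveParser.DOT, ".")
          .add(ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL")
              .add(HiveParser.Identifier, "s4"))
          .add(HiveParser.Identifier, "col"))
      .node();
  // Without an alias: TOK_SELEXPR over a plain TOK_TABLE_OR_COL reference
  ASTNode unqualified = ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR")
      .add(ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL")
          .add(HiveParser.Identifier, "col"))
      .node();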
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 9eafb0b..2430811 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2429,7 +2429,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
private void replaceViewReferenceWithDefinition(QB qb, Table tab,
String tab_name, String alias) throws SemanticException {
- ParseDriver pd = new ParseDriver();
ASTNode viewTree;
final ASTNodeOrigin viewOrigin = new ASTNodeOrigin("VIEW", tab.getTableName(),
tab.getViewExpandedText(), alias, qb.getParseInfo().getSrcForAlias(
@@ -2438,8 +2437,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
String viewText = tab.getViewExpandedText();
// Reparse text, passing null for context to avoid clobbering
// the top-level token stream.
- ASTNode tree = pd.parse(viewText, ctx, false);
- tree = ParseUtils.findRootNonNullToken(tree);
+ ASTNode tree = ParseUtils.parse(viewText, ctx, false);
viewTree = tree;
Dispatcher nodeOriginDispatcher = new Dispatcher() {
@Override
@@ -10885,11 +10883,10 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
// check if we need to ctx.setCmd(rewrittenQuery);
ParseDriver pd = new ParseDriver();
try {
- rewrittenTree = pd.parse(rewrittenQuery);
+ rewrittenTree = ParseUtils.parse(rewrittenQuery);
} catch (ParseException e) {
throw new SemanticException(e);
}
- rewrittenTree = ParseUtils.findRootNonNullToken(rewrittenTree);
return rewrittenTree;
} else {
return ast;
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 865c03a..64f1bdd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -299,13 +299,10 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
}
rewrittenCtx.setCmd(rewrittenQueryStr.toString());
- ParseDriver pd = new ParseDriver();
ASTNode rewrittenTree;
try {
LOG.info("Going to reparse <" + originalQuery + "> as \n<" + rewrittenQueryStr.toString() + ">");
- rewrittenTree = pd.parse(rewrittenQueryStr.toString(), rewrittenCtx);
- rewrittenTree = ParseUtils.findRootNonNullToken(rewrittenTree);
-
+ rewrittenTree = ParseUtils.parse(rewrittenQueryStr.toString(), rewrittenCtx);
} catch (ParseException e) {
throw new SemanticException(ErrorMsg.UPDATEDELETE_PARSE_ERROR.getMsg(), e);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
index 12154c9..aca8354 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.tools;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
@@ -109,8 +111,7 @@ public class LineageInfo implements NodeProcessor {
/*
* Get the AST tree
*/
- ParseDriver pd = new ParseDriver();
- ASTNode tree = pd.parse(query);
+ ASTNode tree = ParseUtils.parse(query, null);
while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
tree = (ASTNode) tree.getChild(0);
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index c659806..c734988 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -35,7 +35,6 @@ import org.junit.Test;
public class TestMacroSemanticAnalyzer {
- private ParseDriver parseDriver;
private MacroSemanticAnalyzer analyzer;
private QueryState queryState;
private HiveConf conf;
@@ -47,12 +46,11 @@ public class TestMacroSemanticAnalyzer {
conf = queryState.getConf();
SessionState.start(conf);
context = new Context(conf);
- parseDriver = new ParseDriver();
analyzer = new MacroSemanticAnalyzer(queryState);
}
private ASTNode parse(String command) throws Exception {
- return ParseUtils.findRootNonNullToken(parseDriver.parse(command));
+ return ParseUtils.parse(command);
}
private void analyze(ASTNode ast) throws Exception {
analyzer.analyze(ast, context);
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index d6fe540..a573808 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -258,9 +258,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
ctx.setCmd(query);
ctx.setHDFSCleanup(true);
- ParseDriver pd = new ParseDriver();
- ASTNode tree = pd.parse(query, ctx);
- tree = ParseUtils.findRootNonNullToken(tree);
+ ASTNode tree = ParseUtils.parse(query, ctx);
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
SessionState.get().initTxnMgr(conf);
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
index e8e29ee..d0395dd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
@@ -67,7 +67,7 @@ public class AuthorizationTestUtil {
}
private static ASTNode parse(String command) throws Exception {
- return ParseUtils.findRootNonNullToken((new ParseDriver()).parse(command));
+ return ParseUtils.parse(command);
}
/**
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/queries/clientpositive/union_pos_alias.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/union_pos_alias.q b/ql/src/test/queries/clientpositive/union_pos_alias.q
new file mode 100644
index 0000000..c4eca68
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/union_pos_alias.q
@@ -0,0 +1,30 @@
+set hive.mapred.mode=nonstrict;
+
+
+explain
+select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1;
+
+select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1;
+
+drop table src_10;
+create table src_10 as select * from src limit 10;
+
+explain
+select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc;
+
+
+select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc;
+
+drop table src_10;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/results/clientpositive/constant_prop_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/constant_prop_1.q.out b/ql/src/test/results/clientpositive/constant_prop_1.q.out
index aaa1dac..3ba1f15 100644
--- a/ql/src/test/results/clientpositive/constant_prop_1.q.out
+++ b/ql/src/test/results/clientpositive/constant_prop_1.q.out
@@ -99,7 +99,7 @@ STAGE PLANS:
Union
Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 1
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -115,7 +115,7 @@ STAGE PLANS:
Union
Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 1
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
http://git-wip-us.apache.org/repos/asf/hive/blob/78e4bb79/ql/src/test/results/clientpositive/union_pos_alias.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_pos_alias.q.out b/ql/src/test/results/clientpositive/union_pos_alias.q.out
new file mode 100644
index 0000000..8eddbd9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/union_pos_alias.q.out
@@ -0,0 +1,308 @@
+PREHOOK: query: explain
+select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1, Stage-3, Stage-4
+ Stage-3 is a root stage
+ Stage-4 is a root stage
+ Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: s1
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 'tst1' (type: string), _col0 (type: bigint)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Union
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col1 (type: bigint)
+ TableScan
+ Union
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col1 (type: bigint)
+ TableScan
+ Union
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col1 (type: bigint)
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-3
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: s2
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 'tst2' (type: string), _col0 (type: bigint)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-4
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: s3
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 'tst3' (type: string), _col0 (type: bigint)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select 'tst1' as key, count(1) as value from src s1
+UNION ALL
+select key, value from (select 'tst2' as key, count(1) as value from src s2 UNION ALL select 'tst3' as key, count(1) as value from src s3) s4
+order by 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+tst1 500
+tst2 500
+tst3 500
+PREHOOK: query: drop table src_10
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table src_10
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table src_10 as select * from src limit 10
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_10
+POSTHOOK: query: create table src_10 as select * from src limit 10
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_10
+POSTHOOK: Lineage: src_10.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_10.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: src_10
+ Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), value (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+ Union
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col1 (type: string), _col0 (type: string)
+ sort order: +-
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ TableScan
+ alias: s3
+ Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 'test' (type: string), value (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+ Union
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col1 (type: string), _col0 (type: string)
+ sort order: +-
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey1 (type: string), KEY.reducesinkkey0 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_10
+#### A masked pattern was here ####
+POSTHOOK: query: select key as value, value as key from src_10
+UNION ALL
+select 'test', value from src_10 s3
+order by 2, 1 desc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_10
+#### A masked pattern was here ####
+test val_165
+165 val_165
+test val_238
+238 val_238
+test val_255
+255 val_255
+test val_27
+27 val_27
+test val_278
+278 val_278
+test val_311
+311 val_311
+test val_409
+409 val_409
+test val_484
+484 val_484
+test val_86
+86 val_86
+test val_98
+98 val_98
+PREHOOK: query: drop table src_10
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@src_10
+PREHOOK: Output: default@src_10
+POSTHOOK: query: drop table src_10
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@src_10
+POSTHOOK: Output: default@src_10