Posted to commits@hive.apache.org by rh...@apache.org on 2014/04/09 19:32:31 UTC
svn commit: r1586050 [1/4] - in /hive/branches/branch-0.13/ql/src: java/org/apache/hadoop/hive/ql/optimizer/ java/org/apache/hadoop/hive/ql/optimizer/correlation/ java/org/apache/hadoop/hive/ql/parse/ test/queries/clientpositive/ test/results/clientpos...
Author: rhbutani
Date: Wed Apr 9 17:32:30 2014
New Revision: 1586050
URL: http://svn.apache.org/r1586050
Log:
HIVE-4904 A little more CP crossing RS boundaries (Navis Ryu via Ashutosh Chauhan)
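In this commit, "CP crossing RS boundaries" refers to the ColumnPruner propagating pruned-column information across ReduceSinkOperator boundaries, for example the ReduceSink generated for an ORDER BY inside a subquery: the ReduceSink only forwards the value columns that downstream operators actually read instead of the whole input row. A minimal illustrative Java sketch of that effect for the new order_within_subquery.q test, using made-up column sets (OrderWithinSubqueryColumns is not a Hive class):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class OrderWithinSubqueryColumns {
    public static void main(String[] args) {
        // All columns of the part table created by the test.
        List<String> allPartColumns = Arrays.asList("p_partkey", "p_name", "p_mfgr",
                "p_brand", "p_type", "p_size", "p_container", "p_retailprice", "p_comment");
        // Columns actually read above the inner ORDER BY: the join keys (p_partkey, p_size),
        // the filter column (p_partkey) and the projected column (p_name).
        Set<String> neededDownstream = new LinkedHashSet<>(
                Arrays.asList("p_partkey", "p_size", "p_name"));
        System.out.println("columns in part:                   " + allPartColumns.size());
        System.out.println("value columns the RS must forward: " + neededDownstream);
    }
}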
Added:
hive/branches/branch-0.13/ql/src/test/queries/clientpositive/order_within_subquery.q
hive/branches/branch-0.13/ql/src/test/results/clientpositive/order_within_subquery.q.out
Modified:
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java
hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/branches/branch-0.13/ql/src/test/results/clientpositive/annotate_stats_select.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join18.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join27.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join30.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join31.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join32.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/count.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/distinct_stats.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby3_map.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby3_map_skew.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_cube1.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_distinct_samekey.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_multi_insert_common_distinct.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_position.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_rollup1.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_sort_11.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby_sort_8.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/join18.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/limit_pushdown.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/limit_pushdown_negative.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/metadataonly1.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/multi_insert_gby2.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/multi_insert_gby3.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/nullgroup.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/nullgroup2.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/nullgroup4.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/udf_count.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union11.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union14.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union15.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union16.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union2.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union25.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union28.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union3.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union30.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union31.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union5.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union7.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union9.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/union_view.q.out
hive/branches/branch-0.13/ql/src/test/results/clientpositive/vectorization_limit.q.out
hive/branches/branch-0.13/ql/src/test/results/compiler/plan/groupby2.q.xml
hive/branches/branch-0.13/ql/src/test/results/compiler/plan/groupby3.q.xml
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java Wed Apr 9 17:32:30 2014
@@ -81,7 +81,7 @@ public class ColumnPruner implements Tra
opToParseCtxMap = pGraphContext.getOpParseCtx();
// generate pruned column list for all relevant operators
- ColumnPrunerProcCtx cppCtx = new ColumnPrunerProcCtx(opToParseCtxMap);
+ ColumnPrunerProcCtx cppCtx = new ColumnPrunerProcCtx(pactx);
// create a walker which walks the tree in a DFS manner while maintaining
// the operator stack. The dispatcher
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java Wed Apr 9 17:32:30 2014
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.Se
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -40,19 +41,25 @@ import org.apache.hadoop.hive.ql.plan.Se
*/
public class ColumnPrunerProcCtx implements NodeProcessorCtx {
+ private final ParseContext pctx;
+
private final Map<Operator<? extends OperatorDesc>, List<String>> prunedColLists;
private final HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
private final Map<CommonJoinOperator, Map<Byte, List<String>>> joinPrunedColLists;
- public ColumnPrunerProcCtx(
- HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseContextMap) {
+ public ColumnPrunerProcCtx(ParseContext pctx) {
+ this.pctx = pctx;
+ this.opToParseCtxMap = pctx.getOpParseCtx();
prunedColLists = new HashMap<Operator<? extends OperatorDesc>, List<String>>();
- opToParseCtxMap = opToParseContextMap;
joinPrunedColLists = new HashMap<CommonJoinOperator, Map<Byte, List<String>>>();
}
+ public ParseContext getParseContext() {
+ return pctx;
+ }
+
public Map<CommonJoinOperator, Map<Byte, List<String>>> getJoinPrunedColLists() {
return joinPrunedColLists;
}
@@ -85,17 +92,25 @@ public class ColumnPrunerProcCtx impleme
*/
public List<String> genColLists(Operator<? extends OperatorDesc> curOp)
throws SemanticException {
- List<String> colList = new ArrayList<String>();
- if (curOp.getChildOperators() != null) {
- for (Operator<? extends OperatorDesc> child : curOp.getChildOperators()) {
- if (child instanceof CommonJoinOperator) {
- int tag = child.getParentOperators().indexOf(curOp);
- List<String> prunList = joinPrunedColLists.get(child).get((byte) tag);
- colList = Utilities.mergeUniqElems(colList, prunList);
- } else {
- colList = Utilities
- .mergeUniqElems(colList, prunedColLists.get(child));
- }
+ if (curOp.getChildOperators() == null) {
+ return null;
+ }
+ List<String> colList = null;
+ for (Operator<? extends OperatorDesc> child : curOp.getChildOperators()) {
+ List<String> prunList;
+ if (child instanceof CommonJoinOperator) {
+ int tag = child.getParentOperators().indexOf(curOp);
+ prunList = joinPrunedColLists.get(child).get((byte) tag);
+ } else {
+ prunList = prunedColLists.get(child);
+ }
+ if (prunList == null) {
+ continue;
+ }
+ if (colList == null) {
+ colList = new ArrayList<String>(prunList);
+ } else {
+ colList = Utilities.mergeUniqElems(colList, prunList);
}
}
return colList;
@@ -135,7 +150,7 @@ public class ColumnPrunerProcCtx impleme
List<String> cols = new ArrayList<String>();
SelectDesc conf = op.getConf();
- if (conf.isSelStarNoCompute()) {
+ if (colList != null && conf.isSelStarNoCompute()) {
cols.addAll(colList);
return cols;
}
@@ -148,7 +163,7 @@ public class ColumnPrunerProcCtx impleme
// input columns are used.
List<String> outputColumnNames = conf.getOutputColumnNames();
for (int i = 0; i < outputColumnNames.size(); i++) {
- if (colList.contains(outputColumnNames.get(i))) {
+ if (colList == null || colList.contains(outputColumnNames.get(i))) {
ExprNodeDesc expr = selectExprs.get(i);
cols = Utilities.mergeUniqElems(cols, expr.getCols());
}
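A standalone sketch of the reworked genColLists() contract above, assuming a hypothetical ToyGenColLists class (not Hive code): a null result now means "no child has published a pruned list, so prune nothing", and lists from multiple children are merged without duplicates, as Utilities.mergeUniqElems does:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ToyGenColLists {

    static List<String> genColLists(List<String> children,
                                    Map<String, List<String>> prunedColLists) {
        List<String> colList = null;
        for (String child : children) {
            List<String> prunList = prunedColLists.get(child);
            if (prunList == null) {
                continue;                       // this child gave no information
            }
            if (colList == null) {
                colList = new ArrayList<>(prunList);
            } else {
                for (String col : prunList) {   // mergeUniqElems equivalent
                    if (!colList.contains(col)) {
                        colList.add(col);
                    }
                }
            }
        }
        return colList;                         // may be null: caller must not prune
    }

    public static void main(String[] args) {
        Map<String, List<String>> pruned = new HashMap<>();
        pruned.put("SEL_1", Arrays.asList("key"));
        pruned.put("GBY_2", Arrays.asList("key", "value"));
        System.out.println(genColLists(Arrays.asList("SEL_1", "GBY_2"), pruned)); // [key, value]
        System.out.println(genColLists(Arrays.asList("FS_3"), pruned));           // null
    }
}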
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Wed Apr 9 17:32:30 2014
@@ -32,30 +32,24 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
-import org.apache.hadoop.hive.ql.exec.ExtractOperator;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
-import org.apache.hadoop.hive.ql.exec.LimitOperator;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.PTFOperator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
-import org.apache.hadoop.hive.ql.exec.ScriptOperator;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.UDTFOperator;
-import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
@@ -169,15 +163,24 @@ public final class ColumnPrunerProcFacto
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
+ ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
PTFOperator op = (PTFOperator) nd;
PTFDesc conf = op.getConf();
//Since we cannot know what columns will be needed by a PTF chain,
//we do not prune columns on PTFOperator for PTF chains.
if (!conf.forWindowing()) {
- return getDefaultProc().process(nd, stack, ctx, nodeOutputs);
+
+ Operator<? extends OperatorDesc> parent = op.getParentOperators().get(0);
+ RowResolver parentRR = cppCtx.getParseContext().getOpParseCtx().get(parent).getRowResolver();
+ List<ColumnInfo> sig = parentRR.getRowSchema().getSignature();
+ List<String> colList = new ArrayList<String>();
+ for(ColumnInfo cI : sig) {
+ colList.add(cI.getInternalName());
+ }
+ cppCtx.getPrunedColLists().put(op, colList);
+ return null;
}
- ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();
@@ -299,6 +302,12 @@ public final class ColumnPrunerProcFacto
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
List<String> cols = cppCtx
.genColLists((Operator<? extends OperatorDesc>) nd);
+ if (cols == null && !scanOp.getConf().isGatherStats() ) {
+ scanOp.setNeededColumnIDs(null);
+ return null;
+ }
+ cols = cols == null ? new ArrayList<String>() : cols;
+
cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
cols);
List<Integer> neededColumnIds = new ArrayList<Integer>();
@@ -363,14 +372,7 @@ public final class ColumnPrunerProcFacto
Object... nodeOutputs) throws SemanticException {
ReduceSinkOperator op = (ReduceSinkOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
- HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap = cppCtx
- .getOpToParseCtxMap();
- RowResolver redSinkRR = opToParseCtxMap.get(op).getRowResolver();
ReduceSinkDesc conf = op.getConf();
- List<Operator<? extends OperatorDesc>> childOperators = op
- .getChildOperators();
- List<Operator<? extends OperatorDesc>> parentOperators = op
- .getParentOperators();
List<String> colLists = new ArrayList<String>();
ArrayList<ExprNodeDesc> keys = conf.getKeyCols();
@@ -378,75 +380,60 @@ public final class ColumnPrunerProcFacto
colLists = Utilities.mergeUniqElems(colLists, key.getCols());
}
- if ((childOperators.size() == 1)
- && (childOperators.get(0) instanceof JoinOperator)) {
- assert parentOperators.size() == 1;
- Operator<? extends OperatorDesc> par = parentOperators.get(0);
- JoinOperator childJoin = (JoinOperator) childOperators.get(0);
- RowResolver parRR = opToParseCtxMap.get(par).getRowResolver();
- List<String> childJoinCols = cppCtx.getJoinPrunedColLists().get(
- childJoin).get((byte) conf.getTag());
- boolean[] flags = new boolean[conf.getValueCols().size()];
- for (int i = 0; i < flags.length; i++) {
- flags[i] = false;
- }
- if (childJoinCols != null && childJoinCols.size() > 0) {
- Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
- for (String childCol : childJoinCols) {
- ExprNodeDesc desc = exprMap.get(childCol);
- int index = conf.getValueCols().indexOf(desc);
- flags[index] = true;
- String[] nm = redSinkRR.reverseLookup(childCol);
- if (nm != null) {
- ColumnInfo cInfo = parRR.get(nm[0], nm[1]);
- if (!colLists.contains(cInfo.getInternalName())) {
- colLists.add(cInfo.getInternalName());
- }
- }
- }
- }
- Collections.sort(colLists);
- pruneReduceSinkOperator(flags, op, cppCtx);
- } else if ((childOperators.size() == 1)
- && (childOperators.get(0) instanceof ExtractOperator )
- && (childOperators.get(0).getChildOperators().size() == 1)
- && (childOperators.get(0).getChildOperators().get(0) instanceof PTFOperator )
- && ((PTFOperator)childOperators.get(0).
- getChildOperators().get(0)).getConf().forWindowing() ) {
+ assert op.getNumChild() == 1;
- /*
- * For RS that are followed by Extract & PTFOp for windowing
- * - do the same thing as above. Reconstruct ValueColumn list based on what is required
- * by the PTFOp.
- */
+ Operator<? extends OperatorDesc> child = op.getChildOperators().get(0);
- assert parentOperators.size() == 1;
+ List<String> childCols;
+ if (child instanceof CommonJoinOperator) {
+ childCols = cppCtx.getJoinPrunedColLists().get(child)
+ .get((byte) conf.getTag());
+ } else {
+ childCols = cppCtx.getPrunedColList(child);
- PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
- List<String> childCols = cppCtx.getPrunedColList(ptfOp);
+ }
+ if (childCols != null) {
+ /*
+ * in the case of count(or sum) distinct if we are not able to map
+ * a parameter column references back to the ReduceSink value columns
+ * we give up and assume all columns are needed.
+ */
+ boolean hasUnresolvedReference = false;
boolean[] flags = new boolean[conf.getValueCols().size()];
- for (int i = 0; i < flags.length; i++) {
- flags[i] = false;
- }
- if (childCols != null && childCols.size() > 0) {
- ArrayList<String> outColNames = op.getConf().getOutputValueColumnNames();
- for(int i=0; i < outColNames.size(); i++ ) {
- if ( childCols.contains(outColNames.get(i))) {
- ExprNodeDesc exprNode = op.getConf().getValueCols().get(i);
- flags[i] = true;
- Utilities.mergeUniqElems(colLists, exprNode.getCols());
+ Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
+ for (String childCol : childCols) {
+ ExprNodeDesc desc = exprMap.get(childCol);
+ int index = conf.getValueCols().indexOf(desc);
+ if (index < 0) {
+ hasUnresolvedReference = desc == null || ExprNodeDescUtils.indexOf(desc, conf.getKeyCols()) < 0;
+ if ( hasUnresolvedReference ) {
+ break;
}
+ continue;
+ }
+ flags[index] = true;
+ colLists = Utilities.mergeUniqElems(colLists, desc.getCols());
+ }
+
+ if ( hasUnresolvedReference ) {
+ for (ExprNodeDesc val : conf.getValueCols()) {
+ colLists = Utilities.mergeUniqElems(colLists, val.getCols());
}
+ cppCtx.getPrunedColLists().put(op, colLists);
+ return null;
}
+
Collections.sort(colLists);
pruneReduceSinkOperator(flags, op, cppCtx);
- } else {
- // Reduce Sink contains the columns needed - no need to aggregate from
- // children
- ArrayList<ExprNodeDesc> vals = conf.getValueCols();
- for (ExprNodeDesc val : vals) {
- colLists = Utilities.mergeUniqElems(colLists, val.getCols());
- }
+ cppCtx.getPrunedColLists().put(op, colLists);
+ return null;
+ }
+
+ // Reduce Sink contains the columns needed - no need to aggregate from
+ // children
+ ArrayList<ExprNodeDesc> vals = conf.getValueCols();
+ for (ExprNodeDesc val : vals) {
+ colLists = Utilities.mergeUniqElems(colLists, val.getCols());
}
cppCtx.getPrunedColLists().put(op, colLists);
@@ -472,7 +459,10 @@ public final class ColumnPrunerProcFacto
LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
List<String> cols = cppCtx.genColLists(op);
-
+ if (cols == null) {
+ return null;
+ }
+
Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
// As columns go down the DAG, the LVJ will transform internal column
// names from something like 'key' to '_col0'. Because of this, we need
@@ -556,37 +546,41 @@ public final class ColumnPrunerProcFacto
Object... nodeOutputs) throws SemanticException {
SelectOperator op = (SelectOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
-
- LateralViewJoinOperator lvJoin = null;
+
+
if (op.getChildOperators() != null) {
for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
- // If one of my children is a FileSink or Script, return all columns.
- // Without this break, a bug in ReduceSink to Extract edge column
- // pruning will manifest
- // which should be fixed before remove this
- if ((child instanceof FileSinkOperator)
- || (child instanceof ScriptOperator)
- || (child instanceof UDTFOperator)
- || (child instanceof LimitOperator)
- || (child instanceof UnionOperator)) {
+ // UDTF is not handled yet, so the parent SelectOp of UDTF should just assume
+ // all columns.
+ if ((child instanceof UDTFOperator)) {
cppCtx.getPrunedColLists()
.put(op, cppCtx.getColsFromSelectExpr(op));
return null;
}
- if (op.getConf().isSelStarNoCompute() && child instanceof LateralViewJoinOperator) {
- // this SEL is SEL(*) for LV
- lvJoin = (LateralViewJoinOperator) child;
- }
}
}
+
+ LateralViewJoinOperator lvJoin = null;
+ if (op.getConf().isSelStarNoCompute()) {
+ assert op.getNumChild() == 1;
+ Operator<? extends OperatorDesc> child = op.getChildOperators().get(0);
+ if (child instanceof LateralViewJoinOperator) { // this SEL is SEL(*)
+ // for LV
+ lvJoin = (LateralViewJoinOperator) child;
+ }
+ }
+
List<String> cols = cppCtx.genColLists(op);
SelectDesc conf = op.getConf();
if (lvJoin != null) {
// get columns for SEL(*) from LVJ
- RowResolver rr = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
- cppCtx.getPrunedColLists().put(op, cppCtx.getSelectColsFromLVJoin(rr, cols));
+ if (cols != null) {
+ RowResolver rr = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
+ cppCtx.getPrunedColLists().put(op,
+ cppCtx.getSelectColsFromLVJoin(rr, cols));
+ }
return null;
}
// The input to the select does not matter. Go over the expressions
@@ -594,7 +588,7 @@ public final class ColumnPrunerProcFacto
cppCtx.getPrunedColLists().put(op,
cppCtx.getSelectColsFromChildren(op, cols));
- if (conf.isSelStarNoCompute()) {
+ if (cols == null || conf.isSelStarNoCompute()) {
return null;
}
@@ -612,7 +606,8 @@ public final class ColumnPrunerProcFacto
ArrayList<String> newOutputColumnNames = new ArrayList<String>();
ArrayList<ColumnInfo> rs_oldsignature = op.getSchema().getSignature();
ArrayList<ColumnInfo> rs_newsignature = new ArrayList<ColumnInfo>();
- RowResolver old_rr = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
+ RowResolver old_rr = cppCtx.getOpToParseCtxMap().get(op)
+ .getRowResolver();
RowResolver new_rr = new RowResolver();
for (String col : cols) {
int index = originalOutputColumnNames.indexOf(col);
@@ -694,7 +689,8 @@ public final class ColumnPrunerProcFacto
Map<String, ExprNodeDesc> oldMap = reduce.getColumnExprMap();
LOG.info("RS " + reduce.getIdentifier() + " oldColExprMap: " + oldMap);
RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRowResolver();
- ArrayList<ColumnInfo> signature = oldRR.getRowSchema().getSignature();
+ ArrayList<ColumnInfo> old_signature = oldRR.getRowSchema().getSignature();
+ ArrayList<ColumnInfo> signature = new ArrayList<ColumnInfo>(old_signature);
List<String> valueColNames = reduceConf.getOutputValueColumnNames();
ArrayList<String> newValueColNames = new ArrayList<String>();
@@ -728,6 +724,8 @@ public final class ColumnPrunerProcFacto
}
}
+ oldRR.getRowSchema().setSignature(signature);
+ reduce.getSchema().setSignature(signature);
reduceConf.setOutputValueColumnNames(newValueColNames);
reduceConf.setValueCols(newValueExprs);
TableDesc newValueTable = PlanUtils.getReduceValueTableDesc(PlanUtils
@@ -838,17 +836,19 @@ public final class ColumnPrunerProcFacto
Map<String, ExprNodeDesc> columnExprMap,
Map<Byte, List<Integer>> retainMap, boolean mapJoin) throws SemanticException {
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
- Map<Byte, List<String>> prunedColLists = new HashMap<Byte, List<String>>();
List<Operator<? extends OperatorDesc>> childOperators = op
.getChildOperators();
- for (Operator<? extends OperatorDesc> child : childOperators) {
- if (child instanceof FileSinkOperator) {
+ List<String> childColLists = cppCtx.genColLists(op);
+ if (childColLists == null) {
return;
}
- }
- List<String> childColLists = cppCtx.genColLists(op);
+
+ Map<Byte, List<String>> prunedColLists = new HashMap<Byte, List<String>>();
+ for (byte tag : conf.getTagOrder()) {
+ prunedColLists.put(tag, new ArrayList<String>());
+ }
//add the columns in join filters
Set<Map.Entry<Byte, List<ExprNodeDesc>>> filters =
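A self-contained sketch of the new ReduceSink pruning decision above (ToyRsPruning and its maps are hypothetical, not Hive types): each column the single child needs is mapped back to a value-column index; if a reference resolves to neither a value nor a key column (as can happen with count/sum DISTINCT parameters), the processor gives up and keeps all value columns:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ToyRsPruning {

    static boolean[] neededValueFlags(List<String> childCols,
                                      Map<String, Integer> childColToValueIndex,
                                      Map<String, Integer> childColToKeyIndex,
                                      int valueColCount) {
        boolean[] flags = new boolean[valueColCount];
        for (String childCol : childCols) {
            Integer valueIndex = childColToValueIndex.get(childCol);
            if (valueIndex != null) {
                flags[valueIndex] = true;       // value column is needed downstream
                continue;
            }
            if (childColToKeyIndex.get(childCol) == null) {
                Arrays.fill(flags, true);       // unresolved reference: keep everything
                return flags;
            }
            // resolved to a key column: keys are always shipped, nothing to mark
        }
        return flags;
    }

    public static void main(String[] args) {
        Map<String, Integer> values = new HashMap<>();
        values.put("_col1", 0);
        values.put("_col2", 1);
        Map<String, Integer> keys = new HashMap<>();
        keys.put("_col0", 0);
        System.out.println(Arrays.toString(
                neededValueFlags(Arrays.asList("_col0", "_col2"), values, keys, 2)));
        // [false, true]: value column 0 can be dropped from the shuffle
    }
}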
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java Wed Apr 9 17:32:30 2014
@@ -63,6 +63,10 @@ public class Optimizer {
transformations.add(new ListBucketingPruner());
}
}
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGROUPBY) ||
+ HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_MAP_GROUPBY_SORT)) {
+ transformations.add(new GroupByOptimizer());
+ }
transformations.add(new ColumnPruner());
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
transformations.add(new SkewJoinOptimizer());
@@ -70,10 +74,6 @@ public class Optimizer {
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGBYUSINGINDEX)) {
transformations.add(new RewriteGBUsingIndex());
}
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGROUPBY) ||
- HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_MAP_GROUPBY_SORT)) {
- transformations.add(new GroupByOptimizer());
- }
transformations.add(new SamplePruner());
transformations.add(new MapJoinProcessor());
boolean bucketMapJoinOptimizer = false;
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java Wed Apr 9 17:32:30 2014
@@ -478,6 +478,12 @@ public final class CorrelationUtilities
parent.setChildOperators(Utilities.makeList(newOperator));
}
+ public static void removeOperator(Operator<?> target, ParseContext context) {
+ assert target.getNumParent() == 1 && target.getNumChild() == 1;
+ removeOperator(target,
+ target.getChildOperators().get(0), target.getParentOperators().get(0), context);
+ }
+
protected static void removeOperator(Operator<?> target, Operator<?> child, Operator<?> parent,
ParseContext context) {
for (Operator<?> aparent : target.getParentOperators()) {
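The new removeOperator(target, context) overload above simply splices out an operator that has exactly one parent and one child. A minimal sketch of that splice on a toy node class (ToySplice and Node are hypothetical, not Hive's Operator):

import java.util.ArrayList;
import java.util.List;

public class ToySplice {
    static class Node {
        final String name;
        final List<Node> parents = new ArrayList<>();
        final List<Node> children = new ArrayList<>();
        Node(String name) { this.name = name; }
    }

    // Mirrors the one-parent/one-child precondition of the new overload.
    static void removeOperator(Node target) {
        assert target.parents.size() == 1 && target.children.size() == 1;
        Node parent = target.parents.get(0);
        Node child = target.children.get(0);
        parent.children.set(parent.children.indexOf(target), child);
        child.parents.set(child.parents.indexOf(target), parent);
    }

    public static void main(String[] args) {
        Node ts = new Node("TS"); Node sel = new Node("SEL"); Node fs = new Node("FS");
        ts.children.add(sel); sel.parents.add(ts);
        sel.children.add(fs); fs.parents.add(sel);
        removeOperator(sel);                                   // TS now feeds FS directly
        System.out.println(ts.name + " -> " + ts.children.get(0).name);  // TS -> FS
    }
}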
Modified: hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/branch-0.13/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Apr 9 17:32:30 2014
@@ -4097,12 +4097,13 @@ public class SemanticAnalyzer extends Ba
getColumnInternalName(inputField), "", false);
reduceValues.add(exprDesc);
inputField++;
- outputValueColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
- String field = Utilities.ReduceField.VALUE.toString() + "."
- + getColumnInternalName(reduceValues.size() - 1);
+ String outputColName = getColumnInternalName(reduceValues.size() - 1);
+ outputValueColumnNames.add(outputColName);
+ String internalName = Utilities.ReduceField.VALUE.toString() + "."
+ + outputColName;
reduceSinkOutputRowResolver.putExpression(entry.getValue(),
- new ColumnInfo(field, type, null, false));
- colExprMap.put(field, exprDesc);
+ new ColumnInfo(internalName, type, null, false));
+ colExprMap.put(internalName, exprDesc);
}
}
@@ -6326,19 +6327,18 @@ public class SemanticAnalyzer extends Ba
// signature and generate field expressions for those
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ int i = 0;
for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
+ String internalName = getColumnInternalName(i++);
+ outputColumns.add(internalName);
valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol()));
- colExprMap.put(colInfo.getInternalName(), valueCols
+ colExprMap.put(internalName, valueCols
.get(valueCols.size() - 1));
}
- ArrayList<String> outputColumns = new ArrayList<String>();
- for (int i = 0; i < valueCols.size(); i++) {
- outputColumns.add(getColumnInternalName(i));
- }
-
StringBuilder order = new StringBuilder();
for (int sortOrder : sortOrders) {
order.append(sortOrder == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC ? '+' : '-');
@@ -6445,20 +6445,19 @@ public class SemanticAnalyzer extends Ba
// For the generation of the values expression just get the inputs
// signature and generate field expressions for those
+ ArrayList<String> outputColumns = new ArrayList<String>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
+ int i = 0;
for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
+ String internalName = getColumnInternalName(i++);
+ outputColumns.add(internalName);
valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol()));
- colExprMap.put(colInfo.getInternalName(), valueCols
- .get(valueCols.size() - 1));
+ colExprMap.put(internalName, valueCols.get(valueCols.size() - 1));
}
- ArrayList<String> outputColumns = new ArrayList<String>();
- for (int i = 0; i < valueCols.size(); i++) {
- outputColumns.add(getColumnInternalName(i));
- }
Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
.getReduceSinkDesc(sortCols, valueCols, outputColumns, false, -1,
partitionCols, order.toString(), numReducers),
@@ -10908,13 +10907,13 @@ public class SemanticAnalyzer extends Ba
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
valueCols.add(valueColExpr);
- colExprMap.put(colInfo.getInternalName(), valueColExpr);
- String outColName = SemanticAnalyzer.getColumnInternalName(pos++);
- outputColumnNames.add(outColName);
+ String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
+ outputColumnNames.add(internalName);
+ colExprMap.put(internalName, valueColExpr);
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
ColumnInfo newColInfo = new ColumnInfo(
- outColName, colInfo.getType(), alias[0],
+ internalName, colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
rsOpRR.put(alias[0], alias[1], newColInfo);
}
@@ -11155,13 +11154,13 @@ public class SemanticAnalyzer extends Ba
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
valueCols.add(valueColExpr);
- colExprMap.put(colInfo.getInternalName(), valueColExpr);
- String outColName = SemanticAnalyzer.getColumnInternalName(pos++);
- outputColumnNames.add(outColName);
+ String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
+ outputColumnNames.add(internalName);
+ colExprMap.put(internalName, valueColExpr);
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
ColumnInfo newColInfo = new ColumnInfo(
- outColName, colInfo.getType(), alias[0],
+ internalName, colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
rsNewRR.put(alias[0], alias[1], newColInfo);
String[] altMapping = inputRR.getAlternateMappings(colInfo.getInternalName());
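The SemanticAnalyzer changes above consistently key colExprMap by the generated internal output name (_col0, _col1, ...) rather than by the input column name, so the column pruner can later look up value expressions under the names downstream operators actually use. A simplified sketch, assuming a hypothetical ToyRsBuilder class and a String-valued map in place of ExprNodeDesc:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ToyRsBuilder {
    public static void main(String[] args) {
        List<String> inputCols = Arrays.asList("key", "value", "ds");
        List<String> outputColumns = new ArrayList<>();
        Map<String, String> colExprMap = new LinkedHashMap<>();  // output name -> source column
        int i = 0;
        for (String col : inputCols) {
            String internalName = "_col" + i++;    // stands in for getColumnInternalName(i)
            outputColumns.add(internalName);        // output column name
            colExprMap.put(internalName, col);      // keyed by the output name, not the input name
        }
        System.out.println(outputColumns);  // [_col0, _col1, _col2]
        System.out.println(colExprMap);     // {_col0=key, _col1=value, _col2=ds}
    }
}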
Added: hive/branches/branch-0.13/ql/src/test/queries/clientpositive/order_within_subquery.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/queries/clientpositive/order_within_subquery.q?rev=1586050&view=auto
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/queries/clientpositive/order_within_subquery.q (added)
+++ hive/branches/branch-0.13/ql/src/test/queries/clientpositive/order_within_subquery.q Wed Apr 9 17:32:30 2014
@@ -0,0 +1,19 @@
+CREATE TABLE part(
+ p_partkey INT,
+ p_name STRING,
+ p_mfgr STRING,
+ p_brand STRING,
+ p_type STRING,
+ p_size INT,
+ p_container STRING,
+ p_retailprice DOUBLE,
+ p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
+
+
+select t1.p_name, t2.p_name
+from (select * from part order by p_size limit 10) t1 join part t2 on t1.p_partkey = t2.p_partkey and t1.p_size = t2.p_size
+where t1.p_partkey < 100000;
+
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/annotate_stats_select.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/annotate_stats_select.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/annotate_stats_select.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/annotate_stats_select.q.out Wed Apr 9 17:32:30 2014
@@ -3791,17 +3791,17 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: i1 (type: int), 11 (type: int)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ expressions: i1 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
- value expressions: _col0 (type: int), _col1 (type: int)
+ value expressions: _col0 (type: int)
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3860,10 +3860,10 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Extract
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col0 (type: int)
outputColumnNames: _col0
@@ -3965,17 +3965,17 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: i1 (type: int), 11 (type: int)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ expressions: i1 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
- value expressions: _col0 (type: int), _col1 (type: int)
+ value expressions: _col0 (type: int)
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4034,10 +4034,10 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Extract
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col0 (type: int), 'hello' (type: string)
outputColumnNames: _col0, _col1
@@ -4138,17 +4138,17 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: i1 (type: int), 11.0 (type: double)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+ expressions: 11.0 (type: double)
+ outputColumnNames: _col1
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
- value expressions: _col0 (type: int), _col1 (type: double)
+ value expressions: _col1 (type: double)
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4207,10 +4207,10 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Extract
- Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col1 (type: double)
outputColumnNames: _col0
@@ -4316,17 +4316,17 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: i1 (type: int), unbase64('0xe23') (type: binary)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+ expressions: i1 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
- value expressions: _col0 (type: int), _col1 (type: binary)
+ value expressions: _col0 (type: int)
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4385,10 +4385,10 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Extract
- Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col0 (type: int), 'hello' (type: string)
outputColumnNames: _col0, _col1
@@ -4509,17 +4509,17 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: i1 (type: int), 'hello' (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
+ expressions: 'hello' (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
- value expressions: _col0 (type: int), _col1 (type: string)
+ value expressions: _col1 (type: string)
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4578,10 +4578,10 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Extract
- Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join18.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join18.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join18.q.out Wed Apr 9 17:32:30 2014
@@ -53,7 +53,6 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 1 Data size: 216 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0)
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join27.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join27.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join27.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join27.q.out Wed Apr 9 17:32:30 2014
@@ -60,8 +60,8 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ expressions: _col0 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -101,7 +101,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Union
- Statistics: Num rows: 13 Data size: 2604 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 2704 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -111,9 +111,9 @@ STAGE PLANS:
keys:
0 _col0 (type: string)
1 _col0 (type: string)
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 2974 Basic stats: COMPLETE Column stats: NONE
Select Operator
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 2974 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
mode: hash
@@ -127,16 +127,16 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key < 200) (type: boolean)
- Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
Union
- Statistics: Num rows: 13 Data size: 2604 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 2704 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -146,9 +146,9 @@ STAGE PLANS:
keys:
0 _col0 (type: string)
1 _col0 (type: string)
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 2974 Basic stats: COMPLETE Column stats: NONE
Select Operator
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 2974 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
mode: hash
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join30.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join30.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join30.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join30.q.out Wed Apr 9 17:32:30 2014
@@ -32,19 +32,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -183,7 +183,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -199,11 +199,11 @@ STAGE PLANS:
0
1 {VALUE._col0} {VALUE._col1}
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -296,19 +296,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -400,7 +400,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -416,11 +416,11 @@ STAGE PLANS:
0
1 {VALUE._col0} {VALUE._col1}
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -513,19 +513,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -617,7 +617,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -633,11 +633,11 @@ STAGE PLANS:
0
1 {VALUE._col0} {VALUE._col1}
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -750,7 +750,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -1002,7 +1002,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -1020,11 +1020,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -1042,19 +1042,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -1166,7 +1166,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -1352,7 +1352,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -1370,11 +1370,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -1392,19 +1392,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -1514,7 +1514,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -1634,7 +1634,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -1652,11 +1652,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -1674,19 +1674,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -1796,7 +1796,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -1916,7 +1916,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -1934,11 +1934,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -1956,19 +1956,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -2078,7 +2078,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -2198,7 +2198,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -2216,11 +2216,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -2238,19 +2238,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join31.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join31.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join31.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join31.q.out Wed Apr 9 17:32:30 2014
@@ -48,7 +48,7 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
@@ -234,7 +234,7 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
Reduce Output Operator
key expressions: _col0 (type: string)
@@ -252,11 +252,11 @@ STAGE PLANS:
1 {VALUE._col0} {VALUE._col1}
2
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col3 (type: string)
outputColumnNames: _col2, _col3
- Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 127 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(hash(_col2,_col3))
mode: hash
@@ -274,19 +274,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string)
Reduce Operator Tree:
Extract
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join32.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join32.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join32.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_join32.q.out Wed Apr 9 17:32:30 2014
@@ -81,7 +81,6 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: _col2 (type: bigint)
Local Work:
Map Reduce Local Work
Reduce Operator Tree:
@@ -182,7 +181,6 @@ STAGE PLANS:
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string)
- value expressions: _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0)
@@ -294,7 +292,6 @@ STAGE PLANS:
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string)
- value expressions: _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0)
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out Wed Apr 9 17:32:30 2014
@@ -111,8 +111,8 @@ STAGE PLANS:
predicate: (key < 6) (type: boolean)
Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
- outputColumnNames: _col0, _col1
+ expressions: key (type: int)
+ outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
Union
Statistics: Num rows: 6 Data size: 42 Basic stats: COMPLETE Column stats: NONE
@@ -150,8 +150,8 @@ STAGE PLANS:
predicate: (key < 6) (type: boolean)
Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
- outputColumnNames: _col0, _col1
+ expressions: key (type: int)
+ outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
Union
Statistics: Num rows: 6 Data size: 42 Basic stats: COMPLETE Column stats: NONE
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/count.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/count.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/count.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/count.q.out Wed Apr 9 17:32:30 2014
@@ -58,7 +58,7 @@ STAGE PLANS:
sort order: +++
Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint)
+ value expressions: _col5 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0), count(DISTINCT KEY._col1:1._col0), sum(VALUE._col2)
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/distinct_stats.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/distinct_stats.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/distinct_stats.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/distinct_stats.q.out Wed Apr 9 17:32:30 2014
@@ -59,7 +59,6 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0)
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map.q.out Wed Apr 9 17:32:30 2014
@@ -40,7 +40,7 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col2 (type: bigint), _col3 (type: double)
+ value expressions: _col3 (type: double)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0), sum(VALUE._col1)
Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map_skew.q.out?rev=1586050&r1=1586049&r2=1586050&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map_skew.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/groupby2_map_skew.q.out Wed Apr 9 17:32:30 2014
@@ -40,7 +40,7 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col2 (type: bigint), _col3 (type: double)
+ value expressions: _col3 (type: double)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0), sum(VALUE._col1)