Posted to commits@hive.apache.org by jc...@apache.org on 2016/05/30 14:17:18 UTC
[2/2] hive git commit: HIVE-13808: Use constant expressions to backtrack when we create ReduceSink (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
HIVE-13808: Use constant expressions to backtrack when we create ReduceSink (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
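In short, the patch stops shipping constant expressions through the shuffle: when genReduceSinkPlan builds a ReduceSink, sort and value columns that backtrack to constants are dropped from the key/value lists and are instead re-emitted as literals by the Select operator placed after the ReduceSink, so downstream operators still see the original schema. A minimal sketch of the idea (hypothetical helper class, for illustration only; not the actual Hive code):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

// Illustrative helper: separate the expressions that must travel through the
// ReduceSink from the constants that the SEL operator after the shuffle can
// simply regenerate.
final class ConstantKeySplit {
  final List<ExprNodeDesc> shuffledCols = new ArrayList<ExprNodeDesc>();
  final List<ExprNodeDesc> constantCols = new ArrayList<ExprNodeDesc>();

  ConstantKeySplit(List<ExprNodeDesc> cols) {
    for (ExprNodeDesc col : cols) {
      if (col instanceof ExprNodeConstantDesc) {
        constantCols.add(col);   // not serialized into the reduce key/value
      } else {
        shuffledCols.add(col);   // remains a real reduce key or value column
      }
    }
  }
}

The .q.out changes below reflect exactly this: constant key expressions such as 3 (type: int) or '2008-04-08' disappear from the Reduce Output Operator and reappear as literals in the reduce-side Select Operator.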
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c7c4ec4c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c7c4ec4c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c7c4ec4c
Branch: refs/heads/master
Commit: c7c4ec4ce384f64cf1721714e505e212832b7ac0
Parents: 58d8203
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Mon May 30 14:48:05 2016 +0100
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Mon May 30 15:15:50 2016 +0100
----------------------------------------------------------------------
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 65 +++++++---
.../results/clientpositive/bucket_groupby.q.out | 4 +-
ql/src/test/results/clientpositive/cp_sel.q.out | 11 +-
.../results/clientpositive/decimal_stats.q.out | 3 +-
.../clientpositive/dynamic_rdd_cache.q.out | 6 +-
.../dynpart_sort_optimization_acid.q.out | 16 +--
ql/src/test/results/clientpositive/pcr.q.out | 120 +++++++++----------
.../results/clientpositive/perf/query31.q.out | 2 +-
.../results/clientpositive/perf/query64.q.out | 2 +-
.../results/clientpositive/perf/query66.q.out | 2 +-
.../results/clientpositive/perf/query75.q.out | 2 +-
.../results/clientpositive/pointlookup2.q.out | 96 +++++++--------
.../results/clientpositive/pointlookup3.q.out | 104 ++++++++--------
.../results/clientpositive/ppd_udf_case.q.out | 12 +-
.../clientpositive/reducesink_dedup.q.out | 48 ++++----
.../spark/dynamic_rdd_cache.q.out | 6 +-
.../test/results/clientpositive/spark/pcr.q.out | 64 +++++-----
.../clientpositive/spark/union_view.q.out | 20 ++--
.../clientpositive/tez/vector_coalesce.q.out | 22 ++--
.../clientpositive/tez/vector_date_1.q.out | 5 +-
.../clientpositive/tez/vector_decimal_2.q.out | 85 ++++---------
.../tez/vector_decimal_round_2.q.out | 8 +-
.../clientpositive/tez/vector_interval_1.q.out | 10 +-
.../tez/vector_interval_arithmetic.q.out | 8 +-
.../results/clientpositive/union_view.q.out | 20 ++--
.../clientpositive/vector_coalesce.q.out | 22 ++--
.../results/clientpositive/vector_date_1.q.out | 5 +-
.../clientpositive/vector_decimal_2.q.out | 85 ++++---------
.../clientpositive/vector_decimal_round_2.q.out | 8 +-
.../clientpositive/vector_interval_1.q.out | 10 +-
.../vector_interval_arithmetic.q.out | 8 +-
31 files changed, 390 insertions(+), 489 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 265dd7e..c3d903b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -78,7 +78,6 @@ import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.exec.FetchOperator;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
@@ -129,7 +128,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.HiveOpConverterPostProc;
import org.apache.hadoop.hive.ql.optimizer.lineage.Generator;
-import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec.SpecType;
import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
@@ -183,7 +181,6 @@ import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -7589,7 +7586,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
}
Operator result = genReduceSinkPlan(
input, partCols, sortCols, order.toString(), nullOrder.toString(),
- numReducers, Operation.NOT_ACID);
+ numReducers, Operation.NOT_ACID, true);
if (result.getParentOperators().size() == 1 &&
result.getParentOperators().get(0) instanceof ReduceSinkOperator) {
((ReduceSinkOperator) result.getParentOperators().get(0))
@@ -7598,21 +7595,41 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
return result;
}
+ private Operator genReduceSinkPlan(Operator<?> input,
+ ArrayList<ExprNodeDesc> partitionCols, ArrayList<ExprNodeDesc> sortCols,
+ String sortOrder, String nullOrder, int numReducers, AcidUtils.Operation acidOp)
+ throws SemanticException {
+ return genReduceSinkPlan(input, partitionCols, sortCols, sortOrder, nullOrder, numReducers,
+ acidOp, false);
+ }
+
@SuppressWarnings("nls")
private Operator genReduceSinkPlan(Operator<?> input,
ArrayList<ExprNodeDesc> partitionCols, ArrayList<ExprNodeDesc> sortCols,
- String sortOrder, String nullOrder, int numReducers, AcidUtils.Operation acidOp)
- throws SemanticException {
+ String sortOrder, String nullOrder, int numReducers, AcidUtils.Operation acidOp,
+ boolean pullConstants) throws SemanticException {
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
Operator dummy = Operator.createDummy();
dummy.setParentOperators(Arrays.asList(input));
+ ArrayList<ExprNodeDesc> newSortCols = new ArrayList<ExprNodeDesc>();
+ StringBuilder newSortOrder = new StringBuilder();
+ StringBuilder newNullOrder = new StringBuilder();
ArrayList<ExprNodeDesc> sortColsBack = new ArrayList<ExprNodeDesc>();
- for (ExprNodeDesc sortCol : sortCols) {
- sortColsBack.add(ExprNodeDescUtils.backtrack(sortCol, dummy, input));
+ for (int i = 0; i < sortCols.size(); i++) {
+ ExprNodeDesc sortCol = sortCols.get(i);
+ // If we are not pulling constants, OR
+ // we are pulling constants but this is not a constant
+ if (!pullConstants || !(sortCol instanceof ExprNodeConstantDesc)) {
+ newSortCols.add(sortCol);
+ newSortOrder.append(sortOrder.charAt(i));
+ newNullOrder.append(nullOrder.charAt(i));
+ sortColsBack.add(ExprNodeDescUtils.backtrack(sortCol, dummy, input));
+ }
}
+
// For the generation of the values expression just get the inputs
// signature and generate field expressions for those
RowResolver rsRR = new RowResolver();
@@ -7620,6 +7637,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueColsBack = new ArrayList<ExprNodeDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
+ ArrayList<ExprNodeDesc> constantCols = new ArrayList<ExprNodeDesc>();
ArrayList<ColumnInfo> columnInfos = inputRR.getColumnInfos();
@@ -7632,6 +7650,12 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
// backtrack can be null when input is script operator
ExprNodeDesc valueBack = ExprNodeDescUtils.backtrack(value, dummy, input);
+ if (pullConstants && valueBack instanceof ExprNodeConstantDesc) {
+ // ignore, it will be generated by SEL op
+ index[i] = Integer.MAX_VALUE;
+ constantCols.add(valueBack);
+ continue;
+ }
int kindex = valueBack == null ? -1 : ExprNodeDescUtils.indexOf(valueBack, sortColsBack);
if (kindex >= 0) {
index[i] = kindex;
@@ -7668,8 +7692,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
dummy.setParentOperators(null);
- ReduceSinkDesc rsdesc = PlanUtils.getReduceSinkDesc(sortCols, valueCols, outputColumns,
- false, -1, partitionCols, sortOrder, nullOrder, numReducers, acidOp);
+ ReduceSinkDesc rsdesc = PlanUtils.getReduceSinkDesc(newSortCols, valueCols, outputColumns,
+ false, -1, partitionCols, newSortOrder.toString(), newNullOrder.toString(),
+ numReducers, acidOp);
Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(rsdesc,
new RowSchema(rsRR.getColumnInfos()), input), rsRR);
@@ -7688,23 +7713,29 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
ArrayList<String> selOutputCols = new ArrayList<String>();
Map<String, ExprNodeDesc> selColExprMap = new HashMap<String, ExprNodeDesc>();
+ Iterator<ExprNodeDesc> constants = constantCols.iterator();
for (int i = 0; i < index.length; i++) {
ColumnInfo prev = columnInfos.get(i);
String[] nm = inputRR.reverseLookup(prev.getInternalName());
String[] nm2 = inputRR.getAlternateMappings(prev.getInternalName());
ColumnInfo info = new ColumnInfo(prev);
- String field;
- if (index[i] >= 0) {
- field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
+ ExprNodeDesc desc;
+ if (index[i] == Integer.MAX_VALUE) {
+ desc = constants.next();
} else {
- field = Utilities.ReduceField.VALUE + "." + valueColNames.get(-index[i] - 1);
+ String field;
+ if (index[i] >= 0) {
+ field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
+ } else {
+ field = Utilities.ReduceField.VALUE + "." + valueColNames.get(-index[i] - 1);
+ }
+ desc = new ExprNodeColumnDesc(info.getType(),
+ field, info.getTabAlias(), info.getIsVirtualCol());
}
- String internalName = getColumnInternalName(i);
- ExprNodeColumnDesc desc = new ExprNodeColumnDesc(info.getType(),
- field, info.getTabAlias(), info.getIsVirtualCol());
selCols.add(desc);
+ String internalName = getColumnInternalName(i);
info.setInternalName(internalName);
selectRR.put(nm[0], nm[1], info);
if (nm2 != null) {
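Taken together, the hunks above (a) add a pullConstants overload of genReduceSinkPlan, (b) skip constant sort columns and constant value columns when building the ReduceSinkDesc, marking skipped value columns with index[i] = Integer.MAX_VALUE, and (c) let the Select operator created after the ReduceSink emit those constants directly instead of reading them back from KEY.* / VALUE.* fields. A condensed, self-contained sketch of that reduce-side reconstruction (illustrative class and method names, not the real SemanticAnalyzer code; the actual code builds the field names via Utilities.ReduceField):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

// Illustration only: positions marked Integer.MAX_VALUE take the next pulled
// constant; the remaining positions read KEY./VALUE. columns as before.
final class PostReduceSinkSelectSketch {
  static List<ExprNodeDesc> rebuildSelectExprs(int[] index, List<ColumnInfo> columnInfos,
      List<String> keyColNames, List<String> valueColNames, List<ExprNodeDesc> constantCols) {
    List<ExprNodeDesc> selCols = new ArrayList<ExprNodeDesc>();
    Iterator<ExprNodeDesc> constants = constantCols.iterator();
    for (int i = 0; i < index.length; i++) {
      ColumnInfo info = columnInfos.get(i);
      ExprNodeDesc desc;
      if (index[i] == Integer.MAX_VALUE) {
        desc = constants.next();                         // constant regenerated by the SEL op
      } else if (index[i] >= 0) {
        desc = new ExprNodeColumnDesc(info.getType(),
            "KEY." + keyColNames.get(index[i]),
            info.getTabAlias(), info.getIsVirtualCol()); // shipped as a reduce key
      } else {
        desc = new ExprNodeColumnDesc(info.getType(),
            "VALUE." + valueColNames.get(-index[i] - 1),
            info.getTabAlias(), info.getIsVirtualCol()); // shipped as a value column
      }
      selCols.add(desc);
    }
    return selCols;
  }
}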
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/bucket_groupby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_groupby.q.out b/ql/src/test/results/clientpositive/bucket_groupby.q.out
index 1afab38..e198617 100644
--- a/ql/src/test/results/clientpositive/bucket_groupby.q.out
+++ b/ql/src/test/results/clientpositive/bucket_groupby.q.out
@@ -467,8 +467,8 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: _col0 (type: string), 3 (type: int)
- sort order: ++
+ key expressions: _col0 (type: string)
+ sort order: +
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
value expressions: _col1 (type: bigint)
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/cp_sel.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cp_sel.q.out b/ql/src/test/results/clientpositive/cp_sel.q.out
index 9ee9754..6300578 100644
--- a/ql/src/test/results/clientpositive/cp_sel.q.out
+++ b/ql/src/test/results/clientpositive/cp_sel.q.out
@@ -16,18 +16,17 @@ STAGE PLANS:
alias: srcpart
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), 'hello' (type: string), 'world' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
+ expressions: key (type: string), value (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: 1 (type: int)
- sort order: +
+ sort order:
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
- value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+ value expressions: _col0 (type: string), _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
+ expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), 'hello' (type: string), 'world' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Limit
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/decimal_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/decimal_stats.q.out b/ql/src/test/results/clientpositive/decimal_stats.q.out
index 72806f0..6bcf3fa 100644
--- a/ql/src/test/results/clientpositive/decimal_stats.q.out
+++ b/ql/src/test/results/clientpositive/decimal_stats.q.out
@@ -69,8 +69,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
- key expressions: 1 (type: int)
- sort order: +
+ sort order:
Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE
TopN Hash Memory Usage: 0.1
value expressions: _col0 (type: decimal(4,2)), _col1 (type: decimal(5,0)), _col2 (type: decimal(10,0))
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
index 86346b3..4088a39 100644
--- a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
@@ -1139,13 +1139,13 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: _col0 (type: int), _col1 (type: int), 3 (type: int), _col3 (type: double), _col4 (type: double), 4 (type: int), _col8 (type: double), _col9 (type: double)
- sort order: ++++++++
+ key expressions: _col0 (type: int), _col1 (type: int), _col3 (type: double), _col4 (type: double), _col8 (type: double), _col9 (type: double)
+ sort order: ++++++
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
value expressions: _col5 (type: int), _col6 (type: int)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: int), KEY.reducesinkkey3 (type: double), KEY.reducesinkkey4 (type: double), VALUE._col0 (type: int), VALUE._col1 (type: int), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey6 (type: double), KEY.reducesinkkey7 (type: double)
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: int), 3 (type: int), KEY.reducesinkkey2 (type: double), KEY.reducesinkkey3 (type: double), VALUE._col0 (type: int), VALUE._col1 (type: int), 4 (type: int), KEY.reducesinkkey4 (type: double), KEY.reducesinkkey5 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out b/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
index ac95ec2..de08dc5 100644
--- a/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
+++ b/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
@@ -164,7 +164,7 @@ STAGE PLANS:
value expressions: _col3 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), 'foo' (type: string), 'bar' (type: string), VALUE._col2 (type: string)
+ expressions: KEY.reducesinkkey0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), 'foo' (type: string), 'bar' (type: string), VALUE._col1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
File Output Operator
compressed: false
@@ -644,7 +644,7 @@ STAGE PLANS:
value expressions: _col4 (type: int)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string), VALUE._col3 (type: int)
+ expressions: KEY.reducesinkkey0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), 'foo' (type: string), 'bar' (type: string), '2008-04-08' (type: string), VALUE._col2 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
File Output Operator
compressed: false
@@ -1063,13 +1063,13 @@ STAGE PLANS:
Filter Operator
predicate: (key = 'foo') (type: boolean)
Select Operator
- expressions: ROW__ID (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), key (type: string), 'bar' (type: string), ds (type: string), hr (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ expressions: ROW__ID (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), key (type: string), ds (type: string), hr (type: int)
+ outputColumnNames: _col0, _col1, _col3, _col4
Reduce Output Operator
key expressions: _col3 (type: string), _col4 (type: int), '_bucket_number' (type: string), _col0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>)
sort order: ++++
Map-reduce partition columns: _col3 (type: string), _col4 (type: int)
- value expressions: _col1 (type: string), _col2 (type: string)
+ value expressions: _col1 (type: string), 'bar' (type: string)
Reduce Operator Tree:
Select Operator
expressions: KEY._col0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY._col3 (type: string), KEY._col4 (type: int), KEY.'_bucket_number' (type: string)
@@ -1137,13 +1137,13 @@ STAGE PLANS:
Filter Operator
predicate: (key = 'foo') (type: boolean)
Select Operator
- expressions: ROW__ID (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), key (type: string), 'bar' (type: string), ds (type: string), hr (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ expressions: ROW__ID (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), key (type: string), ds (type: string), hr (type: int)
+ outputColumnNames: _col0, _col1, _col3, _col4
Reduce Output Operator
key expressions: _col3 (type: string), _col4 (type: int), '_bucket_number' (type: string), _col0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>)
sort order: ++++
Map-reduce partition columns: _col3 (type: string), _col4 (type: int)
- value expressions: _col1 (type: string), _col2 (type: string)
+ value expressions: _col1 (type: string), 'bar' (type: string)
Reduce Operator Tree:
Select Operator
expressions: KEY._col0 (type: struct<transactionid:bigint,bucketid:int,rowid:bigint>), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY._col3 (type: string), KEY._col4 (type: int), KEY.'_bucket_number' (type: string)
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out
index 9fb1481..2a5d579 100644
--- a/ql/src/test/results/clientpositive/pcr.q.out
+++ b/ql/src/test/results/clientpositive/pcr.q.out
@@ -1400,9 +1400,9 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: 14 (type: int), _col1 (type: string)
- null sort order: aa
- sort order: ++
+ key expressions: _col1 (type: string)
+ null sort order: a
+ sort order: +
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
tag: -1
auto parallelism: false
@@ -1505,7 +1505,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ expressions: 14 (type: int), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2327,27 +2327,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -2360,7 +2356,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -2371,8 +2367,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -2380,8 +2376,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -2390,7 +2386,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col1 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2631,27 +2627,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string), '2000-04-09' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -2664,7 +2656,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string), _col5 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -2675,8 +2667,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -2684,8 +2676,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -2694,7 +2686,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col4 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-09' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4745,9 +4737,9 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), '2008-04-08' (type: string), _col3 (type: string)
- null sort order: aaa
- sort order: +++
+ key expressions: _col0 (type: string), _col3 (type: string)
+ null sort order: aa
+ sort order: ++
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
@@ -4853,7 +4845,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4924,9 +4916,9 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), _col2 (type: string), '11' (type: string)
- null sort order: aaa
- sort order: +++
+ key expressions: _col0 (type: string), _col2 (type: string)
+ null sort order: aa
+ sort order: ++
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
@@ -5032,7 +5024,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), '11' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/perf/query31.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query31.q.out b/ql/src/test/results/clientpositive/perf/query31.q.out
index 935631c..433d419 100644
--- a/ql/src/test/results/clientpositive/perf/query31.q.out
+++ b/ql/src/test/results/clientpositive/perf/query31.q.out
@@ -39,7 +39,7 @@ Stage-0
<-Reducer 6 [SIMPLE_EDGE]
SHUFFLE [RS_138]
Select Operator [SEL_137] (rows=316243230 width=88)
- Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
+ Output:["_col0","_col2","_col3","_col4","_col5"]
Filter Operator [FIL_136] (rows=316243230 width=88)
predicate:CASE WHEN ((_col3 > 0)) THEN (CASE WHEN ((_col9 > 0)) THEN (((_col11 / _col9) > (_col5 / _col3))) ELSE ((null > (_col5 / _col3))) END) ELSE (CASE WHEN ((_col9 > 0)) THEN (((_col11 / _col9) > null)) ELSE (null) END) END
Merge Join Operator [MERGEJOIN_272] (rows=632486460 width=88)
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/perf/query64.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query64.q.out b/ql/src/test/results/clientpositive/perf/query64.q.out
index 5a5ee74..5c931c0 100644
--- a/ql/src/test/results/clientpositive/perf/query64.q.out
+++ b/ql/src/test/results/clientpositive/perf/query64.q.out
@@ -59,7 +59,7 @@ Stage-0
<-Reducer 5 [SIMPLE_EDGE]
SHUFFLE [RS_265]
Select Operator [SEL_264] (rows=331415616 width=88)
- Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20"]
+ Output:["_col0","_col1","_col10","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col2","_col20","_col3","_col4","_col5","_col6","_col7","_col8","_col9"]
Filter Operator [FIL_263] (rows=331415616 width=88)
predicate:(_col30 <= _col13)
Merge Join Operator [MERGEJOIN_658] (rows=994246850 width=88)
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/perf/query66.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query66.q.out b/ql/src/test/results/clientpositive/perf/query66.q.out
index 5eef7e7..8f62292 100644
--- a/ql/src/test/results/clientpositive/perf/query66.q.out
+++ b/ql/src/test/results/clientpositive/perf/query66.q.out
@@ -465,7 +465,7 @@ Stage-0
<-Reducer 8 [SIMPLE_EDGE]
SHUFFLE [RS_73]
Select Operator [SEL_72] (rows=158120068 width=135)
- Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29","_col30","_col31","_col32","_col33","_col34","_col35","_col36","_col37","_col38","_col39","_col40","_col41","_col42","_col43"]
+ Output:["_col0","_col1","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col2","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29","_col3","_col30","_col31","_col32","_col33","_col34","_col35","_col36","_col37","_col38","_col39","_col4","_col40","_col41","_col42","_col43","_col5","_col8","_col9"]
Group By Operator [GBY_71] (rows=158120068 width=135)
Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29","_col30","_col31","_col32","_col33","_col34","_col35","_col36","_col37","_col38","_col39","_col40","_col41"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)","sum(VALUE._col4)","sum(VALUE._col5)","sum(VALUE._col6)","sum(VALUE._col7)","sum(VALUE._col8)","sum(VALUE._col9)","sum(VALUE._col10)","sum(VALUE._col11)","sum(VALUE._col12)","sum(VALUE._col13)","sum(VALUE._col14)","sum(VALUE._col15)","sum(VALUE._col16)","sum(VALUE._col17)","sum(VALUE._col18)","sum(VALUE._col19)","sum(VALUE._col20)","sum(VALUE._col21)","sum(VALUE._col22)","sum(VALUE._col23)","sum(VALUE._col24)","sum(VALUE._col25)","sum(VALUE._col26)","sum(VALUE._col27)","sum(VALUE._col28)","sum(VALUE._col29)
","sum(VALUE._col30)","sum(VALUE._col31)","sum(VALUE._col32)","sum(VALUE._col33)","sum(VALUE._col34)","sum(VALUE._col35)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4, KEY._col5
<-Union 7 [SIMPLE_EDGE]
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/perf/query75.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query75.q.out b/ql/src/test/results/clientpositive/perf/query75.q.out
index 802c1af..4279f54 100644
--- a/ql/src/test/results/clientpositive/perf/query75.q.out
+++ b/ql/src/test/results/clientpositive/perf/query75.q.out
@@ -41,7 +41,7 @@ Stage-0
<-Reducer 7 [SIMPLE_EDGE]
SHUFFLE [RS_153]
Select Operator [SEL_152] (rows=245965926 width=108)
- Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9"]
+ Output:["_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9"]
Filter Operator [FIL_151] (rows=245965926 width=108)
predicate:(UDFToDouble((CAST( _col4 AS decimal(17,2)) / CAST( _col10 AS decimal(17,2)))) < 0.9)
Merge Join Operator [MERGEJOIN_259] (rows=737897778 width=108)
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/pointlookup2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pointlookup2.q.out b/ql/src/test/results/clientpositive/pointlookup2.q.out
index 5facf9a..dfcf14e 100644
--- a/ql/src/test/results/clientpositive/pointlookup2.q.out
+++ b/ql/src/test/results/clientpositive/pointlookup2.q.out
@@ -379,27 +379,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -412,7 +408,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -423,8 +419,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -432,8 +428,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4
- columns.types int,string,string,int,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -442,7 +438,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col1 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -643,27 +639,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string), '2000-04-09' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -676,7 +668,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string), _col5 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -687,8 +679,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -696,8 +688,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5
- columns.types int,string,string,int,string,string
+ columns _col0,_col1,_col3,_col4
+ columns.types int,string,int,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -706,7 +698,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col4 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-09' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/pointlookup3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pointlookup3.q.out b/ql/src/test/results/clientpositive/pointlookup3.q.out
index def1bc8..35c2094 100644
--- a/ql/src/test/results/clientpositive/pointlookup3.q.out
+++ b/ql/src/test/results/clientpositive/pointlookup3.q.out
@@ -245,9 +245,9 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), '2001-04-08' (type: string)
- null sort order: aaaa
- sort order: ++++
+ key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string)
+ null sort order: aaa
+ sort order: +++
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
tag: -1
auto parallelism: false
@@ -305,7 +305,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), '2001-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -461,27 +461,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col6
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 (type: string), '2001-04-08' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4,_col5,_col6
+ columns.types int,string,string,int,string,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -494,7 +490,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 (type: string), _col7 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -505,8 +501,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
+ columns _col0,_col1,_col3,_col4,_col5,_col6
+ columns.types int,string,string,int,string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -514,8 +510,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
+ columns _col0,_col1,_col3,_col4,_col5,_col6
+ columns.types int,string,string,int,string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -524,7 +520,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: string), VALUE._col6 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col4 (type: string), '2001-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -727,27 +723,23 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col7
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), '2000-04-09' (type: string), _col7 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
- escape.delim \
- serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ properties:
+ columns _col0,_col1,_col3,_col4,_col5,_col7
+ columns.types int,string,string,int,string,string
+ escape.delim \
+ serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-2
Map Reduce
@@ -760,7 +752,7 @@ STAGE PLANS:
sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 (type: string), _col7 (type: string)
+ value expressions: _col1 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col7 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
@@ -771,8 +763,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
+ columns _col0,_col1,_col3,_col4,_col5,_col7
+ columns.types int,string,string,int,string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -780,8 +772,8 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
- columns.types int,string,string,string,int,string,string,string
+ columns _col0,_col1,_col3,_col4,_col5,_col7
+ columns.types int,string,string,int,string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -790,7 +782,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: string), VALUE._col6 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), '2000-04-09' (type: string), VALUE._col4 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/ppd_udf_case.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_udf_case.q.out b/ql/src/test/results/clientpositive/ppd_udf_case.q.out
index 56941cd..e444ef7 100644
--- a/ql/src/test/results/clientpositive/ppd_udf_case.q.out
+++ b/ql/src/test/results/clientpositive/ppd_udf_case.q.out
@@ -83,12 +83,12 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: '27' (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string), '27' (type: string), _col5 (type: string), '2008-04-08' (type: string), _col7 (type: string)
- sort order: ++++++++
+ key expressions: _col1 (type: string), _col3 (type: string), _col5 (type: string), _col7 (type: string)
+ sort order: ++++
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey5 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey7 (type: string)
+ expressions: '27' (type: string), KEY.reducesinkkey0 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey1 (type: string), '27' (type: string), KEY.reducesinkkey2 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey3 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -227,12 +227,12 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: '27' (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string), '27' (type: string), _col5 (type: string), '2008-04-08' (type: string), _col7 (type: string)
- sort order: ++++++++
+ key expressions: _col1 (type: string), _col3 (type: string), _col5 (type: string), _col7 (type: string)
+ sort order: ++++
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey5 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey7 (type: string)
+ expressions: '27' (type: string), KEY.reducesinkkey0 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey1 (type: string), '27' (type: string), KEY.reducesinkkey2 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey3 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
File Output Operator
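
The ppd_udf_case hunks above show the HIVE-13808 pattern in its simplest form: literal keys such as '27' and '2008-04-08' are no longer shipped through the ReduceSink (the key expressions and sort order shrink from eight entries to four), and the reducer-side Select re-emits those literals as constant expressions while backtracking only the remaining keys to KEY.reducesinkkeyN references. The sketch below is only an illustration of that idea under a toy expression model; it is not Hive's SemanticAnalyzer code, and the names ConstantBacktrackSketch, Expr, Constant, Column, reduceSinkKeys and reducerSelect are made up for this example.

// Illustrative sketch only -- not Hive code. Models the HIVE-13808 idea:
// constants are dropped from the ReduceSink key and re-emitted as literals
// in the reducer-side Select, with only non-constant expressions backtracked
// to KEY.reducesinkkeyN column references.
import java.util.ArrayList;
import java.util.List;

public class ConstantBacktrackSketch {

  interface Expr { }

  static final class Constant implements Expr {
    final String literal;
    Constant(String literal) { this.literal = literal; }
    @Override public String toString() { return "'" + literal + "'"; }
  }

  static final class Column implements Expr {
    final String name;
    Column(String name) { this.name = name; }
    @Override public String toString() { return name; }
  }

  /** Keys actually sent through the shuffle: constants are skipped. */
  static List<Expr> reduceSinkKeys(List<Expr> selectExprs) {
    List<Expr> keys = new ArrayList<>();
    for (Expr e : selectExprs) {
      if (!(e instanceof Constant)) {
        keys.add(e);
      }
    }
    return keys;
  }

  /** Reducer-side Select: literals re-emitted, other exprs backtracked to key columns. */
  static List<String> reducerSelect(List<Expr> selectExprs) {
    List<String> out = new ArrayList<>();
    int keyIdx = 0;
    for (Expr e : selectExprs) {
      if (e instanceof Constant) {
        out.add(e.toString());                    // e.g. '2008-04-08'
      } else {
        out.add("KEY.reducesinkkey" + keyIdx++);  // backtracked reference
      }
    }
    return out;
  }

  public static void main(String[] args) {
    List<Expr> exprs = List.of(
        new Constant("27"), new Column("_col1"),
        new Constant("2008-04-08"), new Column("_col3"));
    System.out.println("key expressions: " + reduceSinkKeys(exprs));
    System.out.println("select: " + reducerSelect(exprs));
  }
}

Run standalone, this prints key expressions [_col1, _col3] and a select list of '27', KEY.reducesinkkey0, '2008-04-08', KEY.reducesinkkey1, mirroring the plan change in the hunks above. The same treatment applies to value expressions, as the union_view diffs further down show.
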
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/reducesink_dedup.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/reducesink_dedup.q.out b/ql/src/test/results/clientpositive/reducesink_dedup.q.out
index 77bffff..b89df52 100644
--- a/ql/src/test/results/clientpositive/reducesink_dedup.q.out
+++ b/ql/src/test/results/clientpositive/reducesink_dedup.q.out
@@ -10,29 +10,29 @@ distribute by 1 sort by 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@part
#### A masked pattern was here ####
-almond azure blanched chiffon midnight
-almond aquamarine dodger light gainsboro
-almond antique sky peru orange
-almond antique medium spring khaki
-almond antique blue firebrick mint
-almond azure aquamarine papaya violet
-almond aquamarine yellow dodger mint
-almond aquamarine floral ivory bisque
-almond antique violet mint lemon
-almond antique gainsboro frosted violet
-almond antique olive coral navajo
-almond antique misty red olive
-almond antique metallic orange dim
-almond antique forest lavender goldenrod
-almond antique chartreuse khaki white
-almond aquamarine sandy cyan gainsboro
-almond aquamarine rose maroon antique
-almond aquamarine midnight light salmon
-almond antique violet turquoise frosted
-almond antique violet chocolate turquoise
-almond aquamarine pink moccasin thistle
-almond aquamarine burnished black steel
-almond antique salmon chartreuse burlywood
-almond antique chartreuse lavender yellow
almond antique burnished rose metallic
almond antique burnished rose metallic
+almond antique chartreuse lavender yellow
+almond antique salmon chartreuse burlywood
+almond aquamarine burnished black steel
+almond aquamarine pink moccasin thistle
+almond antique violet chocolate turquoise
+almond antique violet turquoise frosted
+almond aquamarine midnight light salmon
+almond aquamarine rose maroon antique
+almond aquamarine sandy cyan gainsboro
+almond antique chartreuse khaki white
+almond antique forest lavender goldenrod
+almond antique metallic orange dim
+almond antique misty red olive
+almond antique olive coral navajo
+almond antique gainsboro frosted violet
+almond antique violet mint lemon
+almond aquamarine floral ivory bisque
+almond aquamarine yellow dodger mint
+almond azure aquamarine papaya violet
+almond antique blue firebrick mint
+almond antique medium spring khaki
+almond antique sky peru orange
+almond aquamarine dodger light gainsboro
+almond azure blanched chiffon midnight
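
The reducesink_dedup golden-file reorder above appears to be a side effect of the same change: the query sorts by the literal 1, and once that constant is dropped from the ReduceSink key there is effectively no sort key left, so the rows legitimately come back in a different order. A minimal stand-alone analogy (plain Java, not Hive; row values copied from the output above) is shown below: a comparator that treats every row as equal, which is what sorting by a constant amounts to, imposes no particular order, so any permutation of the rows is an acceptable result.

import java.util.ArrayList;
import java.util.List;

public class ConstantSortOrder {
  public static void main(String[] args) {
    List<String> rows = new ArrayList<>(List.of(
        "almond antique burnished rose metallic",
        "almond azure blanched chiffon midnight",
        "almond antique sky peru orange"));
    // "sort by 1": every row has the same key, so the comparator reports
    // all rows equal and the sort imposes no particular order.
    rows.sort((a, b) -> 0);
    System.out.println(rows);  // order is whatever the (stable) sort preserved
  }
}
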
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 86c5bd7..b290fa1 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -1046,14 +1046,14 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col6, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: int), _col1 (type: int), 3 (type: int), _col3 (type: double), _col4 (type: double), 4 (type: int), _col8 (type: double), _col9 (type: double)
- sort order: ++++++++
+ key expressions: _col0 (type: int), _col1 (type: int), _col3 (type: double), _col4 (type: double), _col8 (type: double), _col9 (type: double)
+ sort order: ++++++
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
value expressions: _col5 (type: int), _col6 (type: int)
Reducer 7
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: int), KEY.reducesinkkey3 (type: double), KEY.reducesinkkey4 (type: double), VALUE._col0 (type: int), VALUE._col1 (type: int), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey6 (type: double), KEY.reducesinkkey7 (type: double)
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: int), 3 (type: int), KEY.reducesinkkey2 (type: double), KEY.reducesinkkey3 (type: double), VALUE._col0 (type: int), VALUE._col1 (type: int), 4 (type: int), KEY.reducesinkkey4 (type: double), KEY.reducesinkkey5 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/spark/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/pcr.q.out b/ql/src/test/results/clientpositive/spark/pcr.q.out
index cbebbdd..cb3e288 100644
--- a/ql/src/test/results/clientpositive/spark/pcr.q.out
+++ b/ql/src/test/results/clientpositive/spark/pcr.q.out
@@ -1441,9 +1441,9 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: 14 (type: int), _col1 (type: string)
- null sort order: aa
- sort order: ++
+ key expressions: _col1 (type: string)
+ null sort order: a
+ sort order: +
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
tag: -1
auto parallelism: false
@@ -1547,7 +1547,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ expressions: 14 (type: int), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2445,23 +2445,19 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string)
- auto parallelism: false
+ tag: -1
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
+ auto parallelism: false
Reducer 3
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col1 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2714,23 +2710,19 @@ STAGE PLANS:
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3, _col4
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: string), '2000-04-08' (type: string), _col3 (type: int), _col4 (type: string), '2000-04-09' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
- tag: -1
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: int), _col4 (type: string), _col5 (type: string)
- auto parallelism: false
+ tag: -1
+ value expressions: _col1 (type: string), _col3 (type: int), _col4 (type: string)
+ auto parallelism: false
Reducer 3
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: int), VALUE._col3 (type: string), VALUE._col4 (type: string)
+ expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), '2000-04-08' (type: string), VALUE._col1 (type: int), VALUE._col2 (type: string), '2000-04-09' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4058,9 +4050,9 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), '2008-04-08' (type: string), _col3 (type: string)
- null sort order: aaa
- sort order: +++
+ key expressions: _col0 (type: string), _col3 (type: string)
+ null sort order: aa
+ sort order: ++
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
@@ -4167,7 +4159,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4243,9 +4235,9 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), _col2 (type: string), '11' (type: string)
- null sort order: aaa
- sort order: +++
+ key expressions: _col0 (type: string), _col2 (type: string)
+ null sort order: aa
+ sort order: ++
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
@@ -4352,7 +4344,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), '11' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/c7c4ec4c/ql/src/test/results/clientpositive/spark/union_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union_view.q.out b/ql/src/test/results/clientpositive/spark/union_view.q.out
index 7ac641f..f06b080 100644
--- a/ql/src/test/results/clientpositive/spark/union_view.q.out
+++ b/ql/src/test/results/clientpositive/spark/union_view.q.out
@@ -541,14 +541,14 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: 86 (type: int), _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col0 (type: string), _col1 (type: string)
+ outputColumnNames: _col1, _col2
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col2 (type: string)
sort order: +
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string)
+ value expressions: _col1 (type: string)
Map 3
Map Operator Tree:
TableScan
@@ -563,14 +563,14 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: 86 (type: int), _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col0 (type: string), _col1 (type: string)
+ outputColumnNames: _col1, _col2
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col2 (type: string)
sort order: +
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string)
+ value expressions: _col1 (type: string)
Map 4
Map Operator Tree:
TableScan
@@ -585,18 +585,18 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: 86 (type: int), _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col0 (type: string), _col1 (type: string)
+ outputColumnNames: _col1, _col2
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col2 (type: string)
sort order: +
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string)
+ value expressions: _col1 (type: string)
Reducer 2
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY.reducesinkkey0 (type: string)
+ expressions: 86 (type: int), VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE
File Output Operator