You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2014/12/30 08:21:43 UTC
svn commit: r1648485 [1/6] - in /hive/trunk:
contrib/src/test/results/clientpositive/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/te...
Author: navis
Date: Tue Dec 30 07:21:41 2014
New Revision: 1648485
URL: http://svn.apache.org/r1648485
Log:
HIVE-9195 : CBO changes constant to column type (Navis reviewed by Ashutosh Chauhan)
Modified:
hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out
hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnListDesc.java
hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out
hive/trunk/ql/src/test/results/clientpositive/auto_join26.q.out
hive/trunk/ql/src/test/results/clientpositive/auto_join27.q.out
hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer10.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer6.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer7.q.out
hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer8.q.out
hive/trunk/ql/src/test/results/clientpositive/count.q.out
hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out
hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
hive/trunk/ql/src/test/results/clientpositive/dynpart_sort_optimization2.q.out
hive/trunk/ql/src/test/results/clientpositive/explain_logical.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_position.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_11.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_2.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_3.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_4.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_5.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_6.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_7.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_9.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
hive/trunk/ql/src/test/results/clientpositive/groupby_sort_test_1.q.out
hive/trunk/ql/src/test/results/clientpositive/input24.q.out
hive/trunk/ql/src/test/results/clientpositive/input30.q.out
hive/trunk/ql/src/test/results/clientpositive/input31.q.out
hive/trunk/ql/src/test/results/clientpositive/input32.q.out
hive/trunk/ql/src/test/results/clientpositive/join29.q.out
hive/trunk/ql/src/test/results/clientpositive/join31.q.out
hive/trunk/ql/src/test/results/clientpositive/join35.q.out
hive/trunk/ql/src/test/results/clientpositive/join40.q.out
hive/trunk/ql/src/test/results/clientpositive/limit_pushdown.q.out
hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
hive/trunk/ql/src/test/results/clientpositive/merge1.q.out
hive/trunk/ql/src/test/results/clientpositive/merge2.q.out
hive/trunk/ql/src/test/results/clientpositive/metadata_only_queries.q.out
hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out
hive/trunk/ql/src/test/results/clientpositive/nullgroup2.q.out
hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
hive/trunk/ql/src/test/results/clientpositive/nullgroup4.q.out
hive/trunk/ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out
hive/trunk/ql/src/test/results/clientpositive/partition_boolexpr.q.out
hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
hive/trunk/ql/src/test/results/clientpositive/ppd_gby_join.q.out
hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx_cbo_1.q.out
hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx_cbo_2.q.out
hive/trunk/ql/src/test/results/clientpositive/stats1.q.out
hive/trunk/ql/src/test/results/clientpositive/subq2.q.out
hive/trunk/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/count.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/dynpart_sort_optimization2.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/limit_pushdown.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/merge1.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/merge2.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/metadata_only_queries.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union2.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union3.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union4.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union5.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union6.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union7.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/union9.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out
hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out
hive/trunk/ql/src/test/results/clientpositive/type_widening.q.out
hive/trunk/ql/src/test/results/clientpositive/udaf_number_format.q.out
hive/trunk/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
hive/trunk/ql/src/test/results/clientpositive/udf3.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_count.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_reflect2.q.out
hive/trunk/ql/src/test/results/clientpositive/union10.q.out
hive/trunk/ql/src/test/results/clientpositive/union11.q.out
hive/trunk/ql/src/test/results/clientpositive/union12.q.out
hive/trunk/ql/src/test/results/clientpositive/union14.q.out
hive/trunk/ql/src/test/results/clientpositive/union15.q.out
hive/trunk/ql/src/test/results/clientpositive/union16.q.out
hive/trunk/ql/src/test/results/clientpositive/union2.q.out
hive/trunk/ql/src/test/results/clientpositive/union20.q.out
hive/trunk/ql/src/test/results/clientpositive/union24.q.out
hive/trunk/ql/src/test/results/clientpositive/union25.q.out
hive/trunk/ql/src/test/results/clientpositive/union3.q.out
hive/trunk/ql/src/test/results/clientpositive/union4.q.out
hive/trunk/ql/src/test/results/clientpositive/union5.q.out
hive/trunk/ql/src/test/results/clientpositive/union6.q.out
hive/trunk/ql/src/test/results/clientpositive/union7.q.out
hive/trunk/ql/src/test/results/clientpositive/union9.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_1.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_10.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_15.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_16.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_18.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_19.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_2.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_20.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_22.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_24.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_25.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_4.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_5.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_7.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_8.q.out
hive/trunk/ql/src/test/results/clientpositive/union_remove_9.q.out
hive/trunk/ql/src/test/results/clientpositive/union_view.q.out
hive/trunk/ql/src/test/results/clientpositive/vector_cast_constant.q.out
hive/trunk/ql/src/test/results/clientpositive/vector_decimal_2.q.out
hive/trunk/ql/src/test/results/clientpositive/vector_decimal_udf.q.out
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableVoidObjectInspector.java
Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out Tue Dec 30 07:21:41 2014
@@ -26,11 +26,11 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: substr(value, 5, 1) (type: string), '(' (type: string), key (type: string), ':' (type: string), value (type: string), ')' (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ expressions: substr(value, 5, 1) (type: string), key (type: string), value (type: string)
+ outputColumnNames: _col0, _col2, _col4
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: example_group_concat(_col1, _col2, _col3, _col4, _col5)
+ aggregations: example_group_concat('(', _col2, ':', _col4, ')')
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out Tue Dec 30 07:21:41 2014
@@ -26,11 +26,11 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: substr(value, 5) (type: string), 10 (type: int), if((UDFToDouble(substr(value, 5)) > 250.0), null, substr(value, 5)) (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: substr(value, 5) (type: string), if((UDFToDouble(substr(value, 5)) > 250.0), null, substr(value, 5)) (type: string)
+ outputColumnNames: _col0, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: example_max_n(_col0, _col1), example_max_n(_col2, _col1)
+ aggregations: example_max_n(_col0, 10), example_max_n(_col2, 10)
mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out Tue Dec 30 07:21:41 2014
@@ -26,11 +26,11 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: substr(value, 5) (type: string), 10 (type: int), if((UDFToDouble(substr(value, 5)) < 250.0), null, substr(value, 5)) (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: substr(value, 5) (type: string), if((UDFToDouble(substr(value, 5)) < 250.0), null, substr(value, 5)) (type: string)
+ outputColumnNames: _col0, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: example_min_n(_col0, _col1), example_min_n(_col2, _col1)
+ aggregations: example_min_n(_col0, 10), example_min_n(_col2, 10)
mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java Tue Dec 30 07:21:41 2014
@@ -389,8 +389,11 @@ public class RexNodeConverter {
}
calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
break;
- case BINARY:
case VOID:
+ calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
+ cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
+ break;
+ case BINARY:
case UNKNOWN:
default:
throw new RuntimeException("UnSupported Literal");
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Tue Dec 30 07:21:41 2014
@@ -62,8 +62,10 @@ import org.apache.hadoop.hive.ql.udf.gen
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -513,9 +515,7 @@ public class TypeCheckProcFactory {
return null;
}
// It's a column.
- return new ExprNodeColumnDesc(colInfo.getType(), colInfo
- .getInternalName(), colInfo.getTabAlias(), colInfo
- .getIsVirtualCol());
+ return toExprNodeDesc(colInfo);
} else {
// It's a table alias.
// We will process that later in DOT.
@@ -547,11 +547,7 @@ public class TypeCheckProcFactory {
}
} else {
// It's a column.
- ExprNodeColumnDesc exprNodColDesc = new ExprNodeColumnDesc(colInfo.getType(), colInfo
- .getInternalName(), colInfo.getTabAlias(), colInfo
- .getIsVirtualCol());
- exprNodColDesc.setSkewedCol(colInfo.isSkewedCol());
- return exprNodColDesc;
+ return toExprNodeDesc(colInfo);
}
}
@@ -559,6 +555,22 @@ public class TypeCheckProcFactory {
}
+ private static ExprNodeDesc toExprNodeDesc(ColumnInfo colInfo) {
+ ObjectInspector inspector = colInfo.getObjectInspector();
+ if (inspector instanceof ConstantObjectInspector &&
+ inspector instanceof PrimitiveObjectInspector) {
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
+ Object constant = ((ConstantObjectInspector) inspector).getWritableConstantValue();
+ return new ExprNodeConstantDesc(colInfo.getType(), poi.getPrimitiveJavaObject(constant));
+ }
+ // non-constant or non-primitive constants
+ ExprNodeColumnDesc column = new ExprNodeColumnDesc(colInfo.getType(), colInfo
+ .getInternalName(), colInfo.getTabAlias(), colInfo
+ .getIsVirtualCol());
+ column.setSkewedCol(colInfo.isSkewedCol());
+ return column;
+ }
+
/**
* Factory method to get ColumnExprProcessor.
*
@@ -979,7 +991,7 @@ public class TypeCheckProcFactory {
return false;
}
- protected ExprNodeColumnDesc processQualifiedColRef(TypeCheckCtx ctx, ASTNode expr,
+ protected ExprNodeDesc processQualifiedColRef(TypeCheckCtx ctx, ASTNode expr,
Object... nodeOutputs) throws SemanticException {
RowResolver input = ctx.getInputRR();
String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getChild(0)
@@ -993,8 +1005,7 @@ public class TypeCheckProcFactory {
ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
return null;
}
- return new ExprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
- colInfo.getTabAlias(), colInfo.getIsVirtualCol());
+ return toExprNodeDesc(colInfo);
}
@Override
@@ -1080,16 +1091,14 @@ public class TypeCheckProcFactory {
for (Map.Entry<String, ColumnInfo> colMap : columns.entrySet()) {
ColumnInfo colInfo = colMap.getValue();
if (!colInfo.getIsVirtualCol()) {
- columnList.addColumn(new ExprNodeColumnDesc(colInfo.getType(),
- colInfo.getInternalName(), colInfo.getTabAlias(), false));
+ columnList.addColumn(toExprNodeDesc(colInfo));
}
}
} else {
// all columns (select *, for example)
for (ColumnInfo colInfo : input.getColumnInfos()) {
if (!colInfo.getIsVirtualCol()) {
- columnList.addColumn(new ExprNodeColumnDesc(colInfo.getType(),
- colInfo.getInternalName(), colInfo.getTabAlias(), false));
+ columnList.addColumn(toExprNodeDesc(colInfo));
}
}
}
@@ -1127,7 +1136,7 @@ public class TypeCheckProcFactory {
expr.getChildCount() - childrenBegin);
for (int ci = childrenBegin; ci < expr.getChildCount(); ci++) {
if (nodeOutputs[ci] instanceof ExprNodeColumnListDesc) {
- children.addAll(((ExprNodeColumnListDesc)nodeOutputs[ci]).getChildren());
+ children.addAll(((ExprNodeColumnListDesc) nodeOutputs[ci]).getChildren());
} else {
children.add((ExprNodeDesc) nodeOutputs[ci]);
}
@@ -1142,8 +1151,7 @@ public class TypeCheckProcFactory {
RowResolver input = ctx.getInputRR();
for (ColumnInfo colInfo : input.getColumnInfos()) {
if (!colInfo.getIsVirtualCol()) {
- children.add(new ExprNodeColumnDesc(colInfo.getType(),
- colInfo.getInternalName(), colInfo.getTabAlias(), false));
+ children.add(toExprNodeDesc(colInfo));
}
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnListDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnListDesc.java?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnListDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnListDesc.java Tue Dec 30 07:21:41 2014
@@ -30,16 +30,17 @@ import org.apache.hadoop.hive.serde2.typ
*/
public class ExprNodeColumnListDesc extends ExprNodeDesc {
- List<ExprNodeColumnDesc> columns = new ArrayList<ExprNodeColumnDesc>();
+ // column or constant
+ final List<ExprNodeDesc> columns = new ArrayList<ExprNodeDesc>();
- public void addColumn(ExprNodeColumnDesc column) {
+ public void addColumn(ExprNodeDesc column) {
columns.add(column);
}
@Override
public ExprNodeDesc clone() {
ExprNodeColumnListDesc clone = new ExprNodeColumnListDesc();
- clone.columns = new ArrayList<ExprNodeColumnDesc>(columns);
+ clone.columns.addAll(columns);
return clone;
}
@@ -73,11 +74,7 @@ public class ExprNodeColumnListDesc exte
@Override
public List<String> getCols() {
- List<String> cols = new ArrayList<String>();
- for (ExprNodeColumnDesc column : columns) {
- cols.add(column.getColumn());
- }
- return cols;
+ throw new IllegalStateException();
}
@Override
Modified: hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q Tue Dec 30 07:21:41 2014
@@ -91,3 +91,8 @@ set hive.cbo.enable=false;
explain
select percentile_approx(case when key < 100 then cast('NaN' as double) else key end, 0.5) from bucket;
select percentile_approx(case when key < 100 then cast('NaN' as double) else key end, 0.5) from bucket;
+
+-- with CBO
+explain
+select percentile_approx(key, 0.5) from bucket;
+select percentile_approx(key, 0.5) from bucket;
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out Tue Dec 30 07:21:41 2014
@@ -750,11 +750,9 @@ STAGE PLANS:
alias: alltypes_orc
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: 1 (type: int)
- outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
- aggregations: count(_col0)
+ aggregations: count(1)
mode: hash
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join26.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join26.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join26.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join26.q.out Tue Dec 30 07:21:41 2014
@@ -70,11 +70,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join27.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join27.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join27.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join27.q.out Tue Dec 30 07:21:41 2014
@@ -115,21 +115,17 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
Statistics: Num rows: 273 Data size: 2908 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 273 Data size: 2908 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col0)
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
TableScan
Union
Statistics: Num rows: 249 Data size: 2644 Basic stats: COMPLETE Column stats: NONE
@@ -140,21 +136,17 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
Statistics: Num rows: 273 Data size: 2908 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 273 Data size: 2908 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col0)
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Local Work:
Map Reduce Local Work
Modified: hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out Tue Dec 30 07:21:41 2014
@@ -58,11 +58,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -167,11 +167,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -250,11 +250,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: length(key) (type: int), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: length(key) (type: int)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0, _col1
@@ -324,11 +324,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: abs(length(key)) (type: int), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: abs(length(key)) (type: int)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0, _col1
@@ -400,12 +400,12 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 3 (type: int), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
- keys: _col0 (type: string), _col1 (type: int)
+ aggregations: count(1)
+ keys: _col0 (type: string), 3 (type: int)
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
@@ -483,11 +483,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: value (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: value (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -564,11 +564,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -943,12 +943,12 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 3 (type: int), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
- keys: _col0 (type: string), _col1 (type: int)
+ aggregations: count(1)
+ keys: _col0 (type: string), 3 (type: int)
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
@@ -1055,11 +1055,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
bucketGroup: true
keys: _col0 (type: string)
mode: hash
@@ -1137,11 +1137,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: value (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: value (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -1218,11 +1218,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: key (type: string), value (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
bucketGroup: true
keys: _col0 (type: string), _col1 (type: string)
mode: hash
@@ -1352,11 +1352,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: key (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
bucketGroup: true
keys: _col0 (type: string)
mode: hash
@@ -1434,11 +1434,11 @@ STAGE PLANS:
alias: clustergroupby
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: value (type: string), key (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: value (type: string), key (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
Modified: hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/combine2.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/combine2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/combine2.q.out Tue Dec 30 07:21:41 2014
@@ -188,11 +188,9 @@ STAGE PLANS:
Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Select Operator
- expressions: 1 (type: int)
- outputColumnNames: _col0
Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col0)
+ aggregations: count(1)
mode: hash
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
@@ -656,11 +654,11 @@ STAGE PLANS:
alias: srcpart
Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: ds (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: ds (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
Modified: hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out Tue Dec 30 07:21:41 2014
@@ -70,11 +70,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -231,13 +231,13 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: complete
outputColumnNames: _col0, _col1
@@ -379,11 +379,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -540,22 +540,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -706,33 +702,29 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: complete
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: complete
+ Select Operator
+ expressions: hash(_col0) (type: int), hash(_col1) (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: hash(_col0) (type: int), hash(_col1) (type: int)
+ Group By Operator
+ aggregations: sum(_col0), sum(_col1)
+ mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: sum(_col0), sum(_col1)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -845,11 +837,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -1000,13 +992,13 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: complete
outputColumnNames: _col0, _col1
@@ -1137,22 +1129,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -1291,22 +1279,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -1449,11 +1433,11 @@ STAGE PLANS:
outputColumnNames: _col1, _col2
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col2 (type: string), _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col2 (type: string), _col1 (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -1583,11 +1567,11 @@ STAGE PLANS:
outputColumnNames: _col1, _col2
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col2 (type: string), _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col2 (type: string), _col1 (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -1725,22 +1709,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -1880,33 +1860,29 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: complete
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: complete
+ Select Operator
+ expressions: hash(_col0) (type: int), hash(_col1) (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: hash(_col0) (type: int), hash(_col1) (type: int)
+ Group By Operator
+ aggregations: sum(_col0), sum(_col1)
+ mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: sum(_col0), sum(_col1)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -2019,11 +1995,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -2173,11 +2149,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -2335,11 +2311,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -2489,11 +2465,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -2652,11 +2628,11 @@ STAGE PLANS:
outputColumnNames: _col1, _col2
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), _col2 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col1 (type: string), _col2 (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -2813,11 +2789,11 @@ STAGE PLANS:
outputColumnNames: _col1, _col2
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), _col2 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col1 (type: string), _col2 (type: string)
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col2)
+ aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -2975,11 +2951,11 @@ STAGE PLANS:
outputColumnNames: _col2
Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col2 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col2 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -3135,11 +3111,11 @@ STAGE PLANS:
outputColumnNames: _col2
Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col2 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col2 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
Modified: hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer10.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer10.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer10.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer10.q.out Tue Dec 30 07:21:41 2014
@@ -81,22 +81,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -296,39 +292,35 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: complete
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: complete
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
- Statistics: Num rows: 276 Data size: 2854 Basic stats: COMPLETE Column stats: NONE
- Join Operator
- condition map:
- Left Semi Join 0 to 1
- keys:
- 0 _col0 (type: string)
- 1 _col0 (type: string)
+ Mux Operator
+ Statistics: Num rows: 276 Data size: 2854 Basic stats: COMPLETE Column stats: NONE
+ Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: bigint)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Mux Operator
Statistics: Num rows: 276 Data size: 2854 Basic stats: COMPLETE Column stats: NONE
Join Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out Tue Dec 30 07:21:41 2014
@@ -72,22 +72,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-4
Map Reduce
@@ -339,39 +335,35 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: complete
+ outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: complete
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Mux Operator
- Statistics: Num rows: 276 Data size: 2854 Basic stats: COMPLETE Column stats: NONE
- Join Operator
- condition map:
- Inner Join 0 to 1
- keys:
- 0 _col0 (type: string)
- 1 _col0 (type: string)
+ Mux Operator
+ Statistics: Num rows: 276 Data size: 2854 Basic stats: COMPLETE Column stats: NONE
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string), _col2 (type: bigint), _col0 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col1 (type: string), _col2 (type: bigint), _col0 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out Tue Dec 30 07:21:41 2014
@@ -1570,22 +1570,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-3
Map Reduce
@@ -1790,22 +1786,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Group By Operator
keys: KEY._col0 (type: string)
mode: mergepartial
@@ -1821,22 +1813,18 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), 1 (type: int)
+ Group By Operator
+ aggregations: count(1)
+ keys: _col0 (type: string)
+ mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Group By Operator
- aggregations: count(_col1)
- keys: _col0 (type: string)
- mode: hash
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
Modified: hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out?rev=1648485&r1=1648484&r2=1648485&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out Tue Dec 30 07:21:41 2014
@@ -200,11 +200,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -398,13 +398,13 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: complete
outputColumnNames: _col0, _col1
@@ -600,11 +600,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -916,11 +916,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -1114,13 +1114,13 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Mux Operator
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: complete
outputColumnNames: _col0, _col1
@@ -1316,11 +1316,11 @@ STAGE PLANS:
outputColumnNames: _col1
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: string), 1 (type: int)
- outputColumnNames: _col0, _col1
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
Group By Operator
- aggregations: count(_col1)
+ aggregations: count(1)
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1