Posted to commits@hive.apache.org by na...@apache.org on 2009/06/15 07:43:29 UTC
svn commit: r784656 [2/30] - in /hadoop/hive/trunk: ./
common/src/java/org/apache/hadoop/hive/conf/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/
ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/or...
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Mon Jun 15 05:43:22 2009
@@ -155,6 +155,18 @@
/**
* Convert the ColumnList to FieldSchema list.
*/
+ public static List<FieldSchema> getFieldSchemasFromColumnList(List<exprNodeDesc> cols, List<String> outputColumnNames, int start,
+ String fieldPrefix) {
+ List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
+ for (int i=0; i<cols.size(); i++) {
+ schemas.add(MetaStoreUtils.getFieldSchemaFromTypeInfo(fieldPrefix + outputColumnNames.get(i+start), cols.get(i).getTypeInfo()));
+ }
+ return schemas;
+ }
+
+ /**
+ * Convert the ColumnList to FieldSchema list.
+ */
public static List<FieldSchema> getFieldSchemasFromColumnList(List<exprNodeDesc> cols,
String fieldPrefix) {
List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
@@ -187,10 +199,23 @@
return schemas;
}
+ public static List<FieldSchema> sortFieldSchemas(List<FieldSchema> schema) {
+ Collections.sort(schema, new Comparator<FieldSchema>(){
+
+ @Override
+ public int compare(FieldSchema o1, FieldSchema o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+
+ });
+ return schema;
+ }
+
/**
* Create the reduce sink descriptor.
* @param keyCols The columns to be stored in the key
* @param valueCols The columns to be stored in the value
+ * @param outputColumnNames The output column names
* @param tag The tag for this reducesink
* @param partitionCols The columns for partitioning.
* @param numReducers The number of reducers, set to -1 for automatic inference
@@ -198,22 +223,45 @@
* @return The reduceSinkDesc object.
*/
public static reduceSinkDesc getReduceSinkDesc(ArrayList<exprNodeDesc> keyCols,
- ArrayList<exprNodeDesc> valueCols,
+ ArrayList<exprNodeDesc> valueCols,
+ List<String> outputColumnNames,
+ boolean includeKeyCols,
int tag,
ArrayList<exprNodeDesc> partitionCols,
String order,
int numReducers) {
-
- return new reduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers,
- getBinarySortableTableDesc(getFieldSchemasFromColumnList(keyCols, "reducesinkkey"), order),
- // Revert to DynamicSerDe: getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
- getLazySimpleSerDeTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
+ tableDesc keyTable = null;
+ tableDesc valueTable = null;
+ ArrayList<String> outputKeyCols = new ArrayList<String>();
+ ArrayList<String> outputValCols = new ArrayList<String>();
+ if (includeKeyCols) {
+ keyTable = getBinarySortableTableDesc(getFieldSchemasFromColumnList(
+ keyCols, outputColumnNames, 0, ""), order);
+ outputKeyCols.addAll(outputColumnNames.subList(0, keyCols.size()));
+ valueTable = getLazySimpleSerDeTableDesc(getFieldSchemasFromColumnList(
+ valueCols, outputColumnNames, keyCols.size(), ""));
+ outputValCols.addAll(outputColumnNames.subList(keyCols.size(), outputColumnNames.size()));
+ } else {
+ keyTable = getBinarySortableTableDesc(getFieldSchemasFromColumnList(
+ keyCols, "reducesinkkey"), order);
+ for (int i = 0; i < keyCols.size(); i++) {
+ outputKeyCols.add("reducesinkkey"+i);
+ }
+ valueTable = getLazySimpleSerDeTableDesc(getFieldSchemasFromColumnList(
+ valueCols, outputColumnNames, 0, ""));
+ outputValCols.addAll(outputColumnNames);
+ }
+ return new reduceSinkDesc(keyCols, valueCols, outputKeyCols, outputValCols, tag, partitionCols, numReducers,
+ keyTable,
+ // Revert to DynamicSerDe: getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
+ valueTable);
}
/**
* Create the reduce sink descriptor.
* @param keyCols The columns to be stored in the key
* @param valueCols The columns to be stored in the value
+ * @param outputColumnNames The output column names
* @param tag The tag for this reducesink
* @param numPartitionFields The first numPartitionFields of keyCols will be partition columns.
* If numPartitionFields=-1, then partition randomly.
@@ -221,11 +269,10 @@
* based on input data size.
* @return The reduceSinkDesc object.
*/
- public static reduceSinkDesc getReduceSinkDesc(ArrayList<exprNodeDesc> keyCols,
- ArrayList<exprNodeDesc> valueCols,
- int tag,
- int numPartitionFields,
- int numReducers) {
+ public static reduceSinkDesc getReduceSinkDesc(
+ ArrayList<exprNodeDesc> keyCols, ArrayList<exprNodeDesc> valueCols,
+ List<String> outputColumnNames, boolean includeKey, int tag, int numPartitionFields,
+ int numReducers) {
ArrayList<exprNodeDesc> partitionCols = null;
if (numPartitionFields >= keyCols.size()) {
@@ -245,10 +292,9 @@
for (int i=0; i<keyCols.size(); i++) {
order.append("+");
}
- return getReduceSinkDesc(keyCols, valueCols, tag, partitionCols, order.toString(),
- numReducers);
+ return getReduceSinkDesc(keyCols, valueCols, outputColumnNames, includeKey, tag, partitionCols, order.toString(),
+ numReducers);
}
-
}
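
For illustration only (not part of the patch), a minimal sketch of how a caller drives the new getReduceSinkDesc overload above; it mirrors the TestExecDriver changes further down and assumes a helper getStringColumn(name) that returns an exprNodeDesc for the named column, as in that test:

    // Caller-supplied names for the operator's output columns (_colN convention).
    ArrayList<String> outputColumns = new ArrayList<String>();
    for (int i = 0; i < 2; i++) {
      outputColumns.add("_col" + i);
    }

    // includeKeyCols = true: outputColumns names the key column(s) first and the
    // value column(s) after them; both serialization schemas are derived from it.
    reduceSinkDesc withKeys = PlanUtils.getReduceSinkDesc(
        Utilities.makeList(getStringColumn("key")),    // key columns
        Utilities.makeList(getStringColumn("value")),  // value columns
        outputColumns, true, -1 /* tag */, 1 /* numPartitionFields */, -1 /* numReducers */);

    // includeKeyCols = false: keys keep the legacy "reducesinkkey0", "reducesinkkey1", ...
    // names and outputColumns names only the value columns.
    reduceSinkDesc valuesOnly = PlanUtils.getReduceSinkDesc(
        Utilities.makeList(getStringColumn("key")),
        Utilities.makeList(getStringColumn("key"), getStringColumn("value")),
        outputColumns, false, -1, 1, -1);

In both cases the resulting reduceSinkDesc carries the key and value name lists explicitly (see the reduceSinkDesc.java hunk below) instead of relying on positional 0, 1, ... names.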
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java Mon Jun 15 05:43:22 2009
@@ -34,14 +34,17 @@
private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators;
private java.util.ArrayList<String> evalMethods;
private java.util.ArrayList<String> aggMethods;
+ private java.util.ArrayList<java.lang.String> outputColumnNames;
public groupByDesc() { }
public groupByDesc(
final Mode mode,
+ final java.util.ArrayList<java.lang.String> outputColumnNames,
final java.util.ArrayList<exprNodeDesc> keys,
final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
final java.util.ArrayList<String> evalMethods,
final java.util.ArrayList<String> aggMethods) {
this.mode = mode;
+ this.outputColumnNames = outputColumnNames;
this.keys = keys;
this.aggregators = aggregators;
this.evalMethods = evalMethods;
@@ -80,6 +83,14 @@
this.keys = keys;
}
+ public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
+ return outputColumnNames;
+ }
+ public void setOutputColumnNames(
+ java.util.ArrayList<java.lang.String> outputColumnNames) {
+ this.outputColumnNames = outputColumnNames;
+ }
+
@explain(displayName="aggregations")
public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> getAggregators() {
return this.aggregators;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java Mon Jun 15 05:43:22 2009
@@ -41,6 +41,8 @@
// alias to key mapping
private Map<Byte, List<exprNodeDesc>> exprs;
+ protected java.util.ArrayList<java.lang.String> outputColumnNames;
+
// No outer join involved
protected boolean noOuterJoin;
@@ -48,20 +50,23 @@
public joinDesc() { }
- public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, final boolean noOuterJoin, final joinCond[] conds) {
+ public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames, final boolean noOuterJoin, final joinCond[] conds) {
this.exprs = exprs;
+ this.outputColumnNames = outputColumnNames;
this.noOuterJoin = noOuterJoin;
this.conds = conds;
}
- public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs) {
+ public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames) {
this.exprs = exprs;
+ this.outputColumnNames = outputColumnNames;
this.noOuterJoin = true;
this.conds = null;
}
- public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, final joinCond[] conds) {
+ public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames, final joinCond[] conds) {
this.exprs = exprs;
+ this.outputColumnNames = outputColumnNames;
this.noOuterJoin = false;
this.conds = conds;
}
@@ -102,6 +107,15 @@
public void setExprs(final Map<Byte, List<exprNodeDesc>> exprs) {
this.exprs = exprs;
}
+
+ public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
+ return outputColumnNames;
+ }
+
+ public void setOutputColumnNames(
+ java.util.ArrayList<java.lang.String> outputColumnNames) {
+ this.outputColumnNames = outputColumnNames;
+ }
public boolean getNoOuterJoin() {
return this.noOuterJoin;
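
Sketch of the updated two-argument joinDesc constructor (inner join, noOuterJoin = true); not part of the patch, and the per-alias expression lists leftExprs and rightExprs are assumed to be built elsewhere:

    // Value expressions per join input, keyed by tag (0 and 1 here).
    Map<Byte, List<exprNodeDesc>> exprs = new HashMap<Byte, List<exprNodeDesc>>();
    exprs.put(Byte.valueOf((byte) 0), leftExprs);
    exprs.put(Byte.valueOf((byte) 1), rightExprs);

    // One explicit _colN name per emitted output column, replacing positional names.
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    for (int i = 0; i < leftExprs.size() + rightExprs.size(); i++) {
      outputColumnNames.add("_col" + i);
    }

    joinDesc jd = new joinDesc(exprs, outputColumnNames);

mapJoinDesc (next hunk) forwards the same name list through super(values, outputColumnNames, conds).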
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java Mon Jun 15 05:43:22 2009
@@ -21,6 +21,8 @@
import java.io.Serializable;
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -44,9 +46,10 @@
final tableDesc keyTblDesc,
final Map<Byte, List<exprNodeDesc>> values,
final List<tableDesc> valueTblDescs,
+ ArrayList<String> outputColumnNames,
final int posBigTable,
final joinCond[] conds) {
- super(values, conds);
+ super(values, outputColumnNames, conds);
this.keys = keys;
this.keyTblDesc = keyTblDesc;
this.valueTblDescs = valueTblDescs;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java Mon Jun 15 05:43:22 2009
@@ -27,10 +27,12 @@
* Key columns are passed to reducer in the "key".
*/
private java.util.ArrayList<exprNodeDesc> keyCols;
+ private java.util.ArrayList<java.lang.String> outputKeyColumnNames;
/**
* Value columns are passed to reducer in the "value".
*/
private java.util.ArrayList<exprNodeDesc> valueCols;
+ private java.util.ArrayList<java.lang.String> outputValueColumnNames;
/**
* Describe how to serialize the key.
*/
@@ -59,6 +61,8 @@
public reduceSinkDesc
(java.util.ArrayList<exprNodeDesc> keyCols,
java.util.ArrayList<exprNodeDesc> valueCols,
+ java.util.ArrayList<java.lang.String> outputKeyColumnNames,
+ java.util.ArrayList<java.lang.String> outputValueColumnNames,
int tag,
java.util.ArrayList<exprNodeDesc> partitionCols,
int numReducers,
@@ -66,6 +70,8 @@
final tableDesc valueSerializeInfo) {
this.keyCols = keyCols;
this.valueCols = valueCols;
+ this.outputKeyColumnNames = outputKeyColumnNames;
+ this.outputValueColumnNames = outputValueColumnNames;
this.tag = tag;
this.numReducers = numReducers;
this.partitionCols = partitionCols;
@@ -73,6 +79,24 @@
this.valueSerializeInfo = valueSerializeInfo;
}
+ public java.util.ArrayList<java.lang.String> getOutputKeyColumnNames() {
+ return outputKeyColumnNames;
+ }
+
+ public void setOutputKeyColumnNames(
+ java.util.ArrayList<java.lang.String> outputKeyColumnNames) {
+ this.outputKeyColumnNames = outputKeyColumnNames;
+ }
+
+ public java.util.ArrayList<java.lang.String> getOutputValueColumnNames() {
+ return outputValueColumnNames;
+ }
+
+ public void setOutputValueColumnNames(
+ java.util.ArrayList<java.lang.String> outputValueColumnNames) {
+ this.outputValueColumnNames = outputValueColumnNames;
+ }
+
@explain(displayName="key expressions")
public java.util.ArrayList<exprNodeDesc> getKeyCols() {
return this.keyCols;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java Mon Jun 15 05:43:22 2009
@@ -24,6 +24,7 @@
public class selectDesc implements Serializable {
private static final long serialVersionUID = 1L;
private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList;
+ private java.util.ArrayList<java.lang.String> outputColumnNames;
private boolean selectStar;
private boolean selStarNoCompute;
public selectDesc() { }
@@ -32,14 +33,15 @@
this.selStarNoCompute = selStarNoCompute;
}
- public selectDesc(final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
- this(colList, false);
+ public selectDesc(final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList, final java.util.ArrayList<java.lang.String> outputColumnNames) {
+ this(colList, outputColumnNames, false);
}
public selectDesc(
- final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList, final boolean selectStar) {
+ final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList, java.util.ArrayList<java.lang.String> outputColumnNames, final boolean selectStar) {
this.colList = colList;
this.selectStar = selectStar;
+ this.outputColumnNames = outputColumnNames;
}
public selectDesc(
@@ -58,6 +60,14 @@
this.colList=colList;
}
+ public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
+ return outputColumnNames;
+ }
+ public void setOutputColumnNames(
+ java.util.ArrayList<java.lang.String> outputColumnNames) {
+ this.outputColumnNames = outputColumnNames;
+ }
+
/**
* @return the selectStar
*/
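
Sketch of the new selectDesc usage, matching the TestOperators change below (illustration only; colList is assumed to already hold the select expressions):

    // One output name per select expression; these are the _colN names that now
    // appear in the EXPLAIN plans below instead of bare column positions.
    ArrayList<String> outputCols = new ArrayList<String>();
    for (int i = 0; i < colList.size(); i++) {
      outputCols.add("_col" + i);
    }
    selectDesc selectCtx = new selectDesc(colList, outputCols);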
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Mon Jun 15 05:43:22 2009
@@ -223,11 +223,14 @@
private void populateMapRedPlan1(Table src) {
mr.setNumReduceTasks(Integer.valueOf(1));
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("key")),
- Utilities.makeList(getStringColumn("value")), -1, 1, -1));
+ Utilities.makeList(getStringColumn("value")), outputColumns, true, -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -247,13 +250,15 @@
@SuppressWarnings("unchecked")
private void populateMapRedPlan2(Table src) {
mr.setNumReduceTasks(Integer.valueOf(1));
-
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("key")),
Utilities.makeList(getStringColumn("key"),
- getStringColumn("value")), -1, 1, -1));
+ getStringColumn("value")), outputColumns, false, -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -280,13 +285,15 @@
private void populateMapRedPlan3(Table src, Table src2) {
mr.setNumReduceTasks(Integer.valueOf(5));
mr.setNeedsTagging(true);
-
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("key")),
Utilities.makeList
- (getStringColumn("value")), Byte.valueOf((byte)0), 1, -1));
+ (getStringColumn("value")), outputColumns, true, Byte.valueOf((byte)0), 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -296,6 +303,7 @@
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("key")),
Utilities.makeList(getStringColumn("key")),
+ outputColumns, true,
Byte.valueOf((byte)1),
Integer.MAX_VALUE, -1));
@@ -316,7 +324,7 @@
TypeInfoFactory.stringTypeInfo),
Utilities.ReduceField.VALUE.toString()),
"0",
- false))), op4);
+ false)), outputColumns), op4);
mr.setReducer(op5);
}
@@ -326,12 +334,15 @@
mr.setNumReduceTasks(Integer.valueOf(1));
// map-side work
-
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("tkey")),
Utilities.makeList(getStringColumn("tkey"),
getStringColumn("tvalue")),
+ outputColumns, false,
-1, 1, -1));
Operator<scriptDesc> op0 = OperatorFactory.get
@@ -342,7 +353,7 @@
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
Utilities.makeList(getStringColumn("key"),
- getStringColumn("value"))), op0);
+ getStringColumn("value")), outputColumns), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -368,17 +379,20 @@
mr.setNumReduceTasks(Integer.valueOf(1));
// map-side work
-
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
Operator<reduceSinkDesc> op0 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
(Utilities.makeList(getStringColumn("0")),
Utilities.makeList(getStringColumn("0"),
getStringColumn("1")),
+ outputColumns, false,
-1, 1, -1));
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
Utilities.makeList(getStringColumn("key"),
- getStringColumn("value"))), op0);
+ getStringColumn("value")), outputColumns), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
@@ -400,12 +414,15 @@
mr.setNumReduceTasks(Integer.valueOf(1));
// map-side work
-
+ ArrayList<String> outputColumns = new ArrayList<String>();
+ for (int i = 0; i < 2; i++)
+ outputColumns.add("_col" + i);
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc(
Utilities.makeList(getStringColumn("tkey")),
Utilities.makeList(getStringColumn("tkey"),
getStringColumn("tvalue")),
+ outputColumns, false,
-1, 1, -1));
Operator<scriptDesc> op0 = OperatorFactory.get
@@ -416,7 +433,7 @@
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
Utilities.makeList(getStringColumn("key"),
- getStringColumn("value"))), op0);
+ getStringColumn("value")), outputColumns), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Mon Jun 15 05:43:22 2009
@@ -130,7 +130,10 @@
ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
earr.add(exprDesc1);
earr.add(exprDesc2);
- selectDesc selectCtx = new selectDesc(earr);
+ ArrayList<String> outputCols = new ArrayList<String>();
+ for (int i = 0; i < earr.size(); i++)
+ outputCols.add("_col"+i);
+ selectDesc selectCtx = new selectDesc(earr, outputCols);
Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
op.setConf(selectCtx);
@@ -171,7 +174,10 @@
ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
earr.add(exprDesc1);
earr.add(exprDesc2);
- selectDesc selectCtx = new selectDesc(earr);
+ ArrayList<String> outputCols = new ArrayList<String>();
+ for (int i = 0; i < earr.size(); i++)
+ outputCols.add("_col"+i);
+ selectDesc selectCtx = new selectDesc(earr, outputCols);
Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
op.setConf(selectCtx);
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
Files hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out (original) and hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out Mon Jun 15 05:43:22 2009 differ
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Mon Jun 15 05:43:22 2009
@@ -14,30 +14,24 @@
Map Reduce
Alias -> Map Operator Tree:
src_thrift
- Select Operator
- expressions:
- expr: lint
- type: array<int>
- expr: lintstring
- type: array<struct<myint:int,mystring:string,underscore_int:int>>
- Filter Operator
- predicate:
- expr: (0[0] > 0)
- type: boolean
- Select Operator
- expressions:
- expr: 0[1]
- type: int
- expr: 1[0].MYSTRING
- type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Filter Operator
+ predicate:
+ expr: (lint[0] > 0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: lint[1]
+ type: int
+ expr: lintstring[0].MYSTRING
+ type: string
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -56,7 +50,7 @@
Output: default/dest1
query: SELECT DEST1.* FROM Dest1
Input: default/dest1
-Output: /data/users/zshao/tools/495-trunk-apache-hive/ql/../build/ql/tmp/154752222/89798452.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/422322731/10000
2 1
4 8
6 27
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Mon Jun 15 05:43:22 2009
@@ -13,38 +13,34 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Select Operator
- expressions:
- expr: key
- type: string
- Filter Operator
- predicate:
- expr: (UDFToDouble(0) = UDFToDouble(86))
- type: boolean
- Select Operator
- expressions:
- expr: (3 + 2)
- type: int
- expr: (3.0 + UDFToDouble(2))
- type: double
- expr: (UDFToDouble(3) + 2.0)
- type: double
- expr: (3.0 + 2.0)
- type: double
- expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0)))
- type: int
- expr: UDFToBoolean(1)
- type: boolean
- expr: UDFToInteger(true)
- type: int
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Filter Operator
+ predicate:
+ expr: (UDFToDouble(key) = UDFToDouble(86))
+ type: boolean
+ Select Operator
+ expressions:
+ expr: (3 + 2)
+ type: int
+ expr: (3.0 + UDFToDouble(2))
+ type: double
+ expr: (UDFToDouble(3) + 2.0)
+ type: double
+ expr: (3.0 + 2.0)
+ type: double
+ expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0)))
+ type: int
+ expr: UDFToBoolean(1)
+ type: boolean
+ expr: UDFToInteger(true)
+ type: int
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -62,5 +58,5 @@
Output: default/dest1
query: select dest1.* FROM dest1
Input: default/dest1
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/351550201/345617503.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1835720350/10000
5 5.0 5.0 5.0 5 true 1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Mon Jun 15 05:43:22 2009
@@ -24,17 +24,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -52,7 +52,7 @@
query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/214678633/80263680.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/775179415/10000
10 val_10
query: EXPLAIN
SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
@@ -80,17 +80,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -108,7 +108,7 @@
query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/216341669/198536245.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1647829108/10000
20 val_20
query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
@@ -136,17 +136,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -164,7 +164,7 @@
query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/323533340/5794695.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/365690364/10000
20 val_20
query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -192,17 +192,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -220,7 +220,7 @@
query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/1217098935/331281713.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1392867261/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
@@ -248,17 +248,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -276,7 +276,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/31096986/775877790.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1057006857/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -304,17 +304,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -332,7 +332,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/1311539054/408350350.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1855630481/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
@@ -360,17 +360,17 @@
type: string
Reduce Output Operator
key expressions:
- expr: 1
+ expr: _col1
type: string
sort order: +
Map-reduce partition columns:
- expr: 1
+ expr: _col1
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -388,7 +388,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/410641125/337645701.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1870112839/10000
20 val_20
query: EXPLAIN
SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
@@ -412,29 +412,29 @@
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
Filter Operator
predicate:
- expr: (UDFToDouble(0) = UDFToDouble(20))
+ expr: (UDFToDouble(_col0) = UDFToDouble(20))
type: boolean
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
File Output Operator
compressed: false
@@ -450,7 +450,7 @@
query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/20608200/132489807.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/980077979/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -467,22 +467,20 @@
Map Reduce
Alias -> Map Operator Tree:
y
- Select Operator
- expressions:
+ Reduce Output Operator
+ key expressions:
expr: key
type: string
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
- type: string
- tag: 1
- value expressions:
- expr: 0
- type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
x
Reduce Output Operator
key expressions:
@@ -503,19 +501,19 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE.0} {VALUE.1}
- 1 {VALUE.0}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
Filter Operator
predicate:
- expr: (UDFToDouble(0) = UDFToDouble(20))
+ expr: (UDFToDouble(_col0) = UDFToDouble(20))
type: boolean
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
File Output Operator
compressed: false
@@ -528,22 +526,22 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/180519365/1223155.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/666021615/10002
Reduce Output Operator
key expressions:
- expr: 1
+ expr: _col1
type: string
sort order: +
Map-reduce partition columns:
- expr: 1
+ expr: _col1
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
Reduce Operator Tree:
Extract
@@ -561,7 +559,7 @@
query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/304705152/1520883527.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1347314676/10000
20 val_20 20
query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -612,21 +610,21 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE.0} {VALUE.1}
- 1 {VALUE.0} {VALUE.1}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
Filter Operator
predicate:
- expr: (UDFToDouble(0) = UDFToDouble(20))
+ expr: (UDFToDouble(_col0) = UDFToDouble(20))
type: boolean
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
- expr: 3
+ expr: _col3
type: string
File Output Operator
compressed: false
@@ -639,24 +637,24 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/273445284/1259872115.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/37635850/10002
Reduce Output Operator
key expressions:
- expr: 1
+ expr: _col1
type: string
sort order: +
Map-reduce partition columns:
- expr: 1
+ expr: _col1
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
- expr: 3
+ expr: _col3
type: string
Reduce Operator Tree:
Extract
@@ -674,7 +672,7 @@
query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/224961014/306327097.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/954552739/10000
20 val_20 20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
@@ -725,21 +723,21 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE.0} {VALUE.1}
- 1 {VALUE.0} {VALUE.1}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
Filter Operator
predicate:
- expr: (UDFToDouble(0) = UDFToDouble(20))
+ expr: (UDFToDouble(_col0) = UDFToDouble(20))
type: boolean
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
- expr: 3
+ expr: _col3
type: string
File Output Operator
compressed: false
@@ -752,24 +750,24 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/389423102/202869602.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/950425430/10002
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
- expr: 3
+ expr: _col3
type: string
Reduce Operator Tree:
Extract
@@ -787,7 +785,7 @@
query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/171221765/388816243.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/245227185/10000
20 val_20 20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
@@ -804,22 +802,20 @@
Map Reduce
Alias -> Map Operator Tree:
y
- Select Operator
- expressions:
+ Reduce Output Operator
+ key expressions:
expr: key
type: string
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
- type: string
- tag: 1
- value expressions:
- expr: 0
- type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
x
Reduce Output Operator
key expressions:
@@ -840,19 +836,19 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE.0} {VALUE.1}
- 1 {VALUE.0}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
Filter Operator
predicate:
- expr: (UDFToDouble(0) = UDFToDouble(20))
+ expr: (UDFToDouble(_col0) = UDFToDouble(20))
type: boolean
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
File Output Operator
compressed: false
@@ -865,22 +861,22 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/399836187/1625046097.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1181511478/10002
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
- expr: 2
+ expr: _col2
type: string
Reduce Operator Tree:
Extract
@@ -898,7 +894,7 @@
query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/667900999/93186085.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/350754453/10000
20 val_20 20
query: EXPLAIN
SELECT unioninput.*
@@ -933,23 +929,23 @@
Union
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
null-subquery2:unioninput-subquery2:src
Filter Operator
@@ -965,23 +961,23 @@
Union
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: string
Reduce Operator Tree:
Extract
@@ -1005,7 +1001,7 @@
) unioninput
CLUSTER BY unioninput.key
Input: default/src
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/730558416/61341521.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1288179627/10000
0 val_0
0 val_0
0 val_0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out Mon Jun 15 05:43:22 2009
@@ -27,35 +27,23 @@
type: int
expr: astring
type: string
- Select Operator
- expressions:
- expr: 0
- type: array<int>
+ Reduce Output Operator
+ sort order:
+ Map-reduce partition columns:
expr: 1
+ type: int
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: array<int>
+ expr: _col1
type: array<string>
- expr: 2
+ expr: _col2
type: map<string,string>
- expr: 3
+ expr: _col3
type: int
- expr: 4
+ expr: _col4
type: string
- Reduce Output Operator
- sort order:
- Map-reduce partition columns:
- expr: 1
- type: int
- tag: -1
- value expressions:
- expr: 0
- type: array<int>
- expr: 1
- type: array<string>
- expr: 2
- type: map<string,string>
- expr: 3
- type: int
- expr: 4
- type: string
Reduce Operator Tree:
Extract
File Output Operator
@@ -84,7 +72,7 @@
Output: default/columnarserde_create_shortcut
query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
Input: default/columnarserde_create_shortcut
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/171037815/113357858.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/761844554/10000
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -98,7 +86,7 @@
null null {} 0 NULL
query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
Input: default/columnarserde_create_shortcut
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/703291409/322213020.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1537792942/10000
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
@@ -122,7 +110,7 @@
value string from deserializer
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1270707165/71798862.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1192618903/10000
238 val_238
86 val_86
311 val_311
@@ -136,7 +124,7 @@
query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string)
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/584823418/366926580.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/811310101/10000
238 val_238 NULL
86 val_86 NULL
311 val_311 NULL
@@ -150,7 +138,7 @@
query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int)
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/420088542/417977156.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1699900771/10000
238
86
311
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Mon Jun 15 05:43:22 2009
@@ -14,24 +14,30 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Reduce Output Operator
- key expressions:
+ Select Operator
+ expressions:
expr: key
type: string
- sort order: +
- Map-reduce partition columns:
- expr: rand()
- type: double
- tag: -1
- value expressions:
- expr: substr(value, 5)
+ expr: value
type: string
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: substr(value, 5)
+ type: string
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(UDFToDouble(VALUE.0))
+ expr: sum(UDFToDouble(VALUE._col0))
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: partial1
File Output Operator
@@ -45,38 +51,38 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/3613518/365997197.10002
+ invalidscheme:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/937548090/10002
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 1
+ expr: _col1
type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: final
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -103,7 +109,7 @@
Output: default/dest_g1
query: SELECT dest_g1.* FROM dest_g1
Input: default/dest_g1
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/233907446/231615542.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/88746984/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out Mon Jun 15 05:43:22 2009
@@ -14,38 +14,44 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Group By Operator
- aggregations:
- expr: sum(UDFToDouble(substr(value, 5)))
- keys:
+ Select Operator
+ expressions:
expr: key
type: string
- mode: hash
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
+ expr: value
+ type: string
+ Group By Operator
+ aggregations:
+ expr: sum(UDFToDouble(substr(value, 5)))
+ keys:
+ expr: key
type: string
- tag: -1
- value expressions:
- expr: 1
- type: double
+ mode: hash
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: mergepartial
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Limit
File Output Operator
@@ -59,23 +65,23 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/77797555/24296274.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1992539150/10002
Reduce Output Operator
sort order:
tag: -1
value expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Reduce Operator Tree:
Extract
Limit
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -102,7 +108,7 @@
Output: default/dest1
query: SELECT dest1.* FROM dest1
Input: default/dest1
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/1165028443/472212754.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/45450358/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out Mon Jun 15 05:43:22 2009
@@ -13,44 +13,50 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Group By Operator
- aggregations:
- expr: sum(UDFToDouble(substr(value, 5)))
- keys:
+ Select Operator
+ expressions:
expr: key
type: string
- mode: hash
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
+ expr: value
+ type: string
+ Group By Operator
+ aggregations:
+ expr: sum(UDFToDouble(substr(value, 5)))
+ keys:
+ expr: key
type: string
- tag: -1
- value expressions:
- expr: 1
- type: double
+ mode: hash
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: mergepartial
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -77,7 +83,7 @@
Output: default/dest1
query: SELECT dest1.* FROM dest1
Input: default/dest1
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/3018978/154486697.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/839465097/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out Mon Jun 15 05:43:22 2009
@@ -13,44 +13,50 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Group By Operator
- aggregations:
- expr: sum(UDFToDouble(substr(value, 5)))
- keys:
+ Select Operator
+ expressions:
expr: key
type: string
- mode: hash
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
+ expr: value
+ type: string
+ Group By Operator
+ aggregations:
+ expr: sum(UDFToDouble(substr(value, 5)))
+ keys:
+ expr: key
type: string
- tag: -1
- value expressions:
- expr: 1
- type: double
+ mode: hash
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: mergepartial
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -77,7 +83,7 @@
Output: default/dest1
query: SELECT dest1.* FROM dest1
Input: default/dest1
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/146895803/933309003.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/476967695/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out Mon Jun 15 05:43:22 2009
@@ -14,31 +14,37 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Group By Operator
- aggregations:
- expr: sum(UDFToDouble(substr(value, 5)))
- keys:
+ Select Operator
+ expressions:
expr: key
type: string
- mode: hash
- Reduce Output Operator
- key expressions:
- expr: 0
+ expr: value
+ type: string
+ Group By Operator
+ aggregations:
+ expr: sum(UDFToDouble(substr(value, 5)))
+ keys:
+ expr: key
type: string
- sort order: +
- Map-reduce partition columns:
- expr: rand()
- type: double
- tag: -1
- value expressions:
- expr: 1
- type: double
+ mode: hash
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: partial2
File Output Operator
@@ -52,38 +58,38 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/1694296539/661835976.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1415819572/10002
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 1
+ expr: _col1
type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: final
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -110,7 +116,7 @@
Output: default/dest1
query: SELECT dest1.* FROM dest1
Input: default/dest1
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/24366777/439991944.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/636255754/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out Mon Jun 15 05:43:22 2009
@@ -13,37 +13,43 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
+ Select Operator
+ expressions:
expr: key
type: string
- tag: -1
- value expressions:
- expr: substr(value, 5)
+ expr: value
type: string
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ value expressions:
+ expr: substr(value, 5)
+ type: string
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(UDFToDouble(VALUE.0))
+ expr: sum(UDFToDouble(VALUE._col0))
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: complete
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Select Operator
expressions:
- expr: UDFToInteger(0)
+ expr: UDFToInteger(_col0)
type: int
- expr: 1
+ expr: _col1
type: double
File Output Operator
compressed: false
@@ -70,7 +76,7 @@
Output: default/dest_g1
query: SELECT dest_g1.* FROM dest_g1
Input: default/dest_g1
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/36429349/823891391.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1415508787/10000
0 0.0
10 10.0
100 200.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out Mon Jun 15 05:43:22 2009
@@ -15,26 +15,32 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Reduce Output Operator
- key expressions:
- expr: substr(key, 1, 1)
- type: string
- expr: substr(value, 5)
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: substr(key, 1, 1)
+ Select Operator
+ expressions:
+ expr: key
type: string
- expr: substr(value, 5)
+ expr: value
type: string
- tag: -1
+ Reduce Output Operator
+ key expressions:
+ expr: substr(key, 1, 1)
+ type: string
+ expr: substr(value, 5)
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: substr(key, 1, 1)
+ type: string
+ expr: substr(value, 5)
+ type: string
+ tag: -1
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: count(DISTINCT KEY.1)
- expr: sum(UDFToDouble(KEY.1))
+ expr: count(DISTINCT KEY._col1)
+ expr: sum(UDFToDouble(KEY._col1))
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: partial1
File Output Operator
@@ -48,45 +54,45 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive2/build/ql/tmp/485798928/730742911.10002
+ file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1233260/10002
Reduce Output Operator
key expressions:
- expr: 0
+ expr: _col0
type: string
sort order: +
Map-reduce partition columns:
- expr: 0
+ expr: _col0
type: string
tag: -1
value expressions:
- expr: 1
+ expr: _col1
type: bigint
- expr: 2
+ expr: _col2
type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: count(VALUE.0)
- expr: sum(VALUE.1)
+ expr: count(VALUE._col0)
+ expr: sum(VALUE._col1)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: final
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: bigint
- expr: concat(0, UDFToString(2))
+ expr: concat(_col0, UDFToString(_col2))
type: string
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: UDFToInteger(1)
+ expr: UDFToInteger(_col1)
type: int
- expr: 2
+ expr: _col2
type: string
File Output Operator
compressed: false
@@ -114,7 +120,7 @@
Output: default/dest_g2
query: SELECT dest_g2.* FROM dest_g2
Input: default/dest_g2
-Output: /data/users/njain/hive2/hive2/ql/../build/ql/tmp/5024073/19494424.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/162017745/10000
0 1 00.0
1 71 116414.0
2 69 225571.0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out?rev=784656&r1=784655&r2=784656&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out Mon Jun 15 05:43:22 2009
@@ -12,38 +12,44 @@
Map Reduce
Alias -> Map Operator Tree:
src
- Group By Operator
- aggregations:
- expr: sum(UDFToDouble(substr(value, 5)))
- keys:
+ Select Operator
+ expressions:
expr: key
type: string
- mode: hash
- Reduce Output Operator
- key expressions:
- expr: 0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: 0
+ expr: value
+ type: string
+ Group By Operator
+ aggregations:
+ expr: sum(UDFToDouble(substr(value, 5)))
+ keys:
+ expr: key
type: string
- tag: -1
- value expressions:
- expr: 1
- type: double
+ mode: hash
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: double
Reduce Operator Tree:
Group By Operator
aggregations:
- expr: sum(VALUE.0)
+ expr: sum(VALUE._col0)
keys:
- expr: KEY.0
+ expr: KEY._col0
type: string
mode: mergepartial
Select Operator
expressions:
- expr: 0
+ expr: _col0
type: string
- expr: 1
+ expr: _col1
type: double
Limit
File Output Operator
@@ -60,7 +66,7 @@
query: SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key LIMIT 5
Input: default/src
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/971888258/854485877.10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/165818417/10000
0 0.0
10 10.0
100 200.0