Posted to commits@hive.apache.org by na...@apache.org on 2009/04/15 00:54:45 UTC
svn commit: r764994 [5/12] - in /hadoop/hive/trunk: ./ data/conf/
eclipse-templates/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/
ql/lib/ ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/...
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Apr 14 22:54:39 2009
@@ -38,6 +38,7 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -147,19 +148,19 @@
private filterDesc getTestFilterDesc(String column) {
ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
- children1.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
+ children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column));
exprNodeDesc lhs = new exprNodeFuncDesc(
- TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
- FunctionRegistry.getUDFClass(Double.class.getName()),
- FunctionRegistry.getUDFMethod(Double.class.getName(), String.class),
+ TypeInfoFactory.doubleTypeInfo,
+ FunctionRegistry.getUDFClass(Constants.DOUBLE_TYPE_NAME),
+ FunctionRegistry.getUDFMethod(Constants.DOUBLE_TYPE_NAME, TypeInfoFactory.stringTypeInfo),
children1);
ArrayList<exprNodeDesc> children2 = new ArrayList<exprNodeDesc>();
- children2.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Long.class), Long.valueOf(100)));
+ children2.add(new exprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long.valueOf(100)));
exprNodeDesc rhs = new exprNodeFuncDesc(
- TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
- FunctionRegistry.getUDFClass(Double.class.getName()),
- FunctionRegistry.getUDFMethod(Double.class.getName(), Long.class),
+ TypeInfoFactory.doubleTypeInfo,
+ FunctionRegistry.getUDFClass(Constants.DOUBLE_TYPE_NAME),
+ FunctionRegistry.getUDFMethod(Constants.DOUBLE_TYPE_NAME, TypeInfoFactory.longTypeInfo),
children2);
ArrayList<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
@@ -167,9 +168,9 @@
children3.add(rhs);
exprNodeDesc desc = new exprNodeFuncDesc(
- TypeInfoFactory.getPrimitiveTypeInfo(Boolean.class),
+ TypeInfoFactory.booleanTypeInfo,
FunctionRegistry.getUDFClass("<"),
- FunctionRegistry.getUDFMethod("<", Double.class, Double.class),
+ FunctionRegistry.getUDFMethod("<", TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.doubleTypeInfo),
children3);
return new filterDesc(desc);
@@ -218,8 +219,8 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), -1, 1, -1));
+ (Utilities.makeList(getStringColumn("key")),
+ Utilities.makeList(getStringColumn("value")), -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -231,7 +232,7 @@
Utilities.defaultTd, false));
Operator<extractDesc> op2 = OperatorFactory.get
- (new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), op3);
+ (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
}
@@ -243,9 +244,9 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
- new exprNodeColumnDesc(String.class, "value")), -1, 1, -1));
+ (Utilities.makeList(getStringColumn("key")),
+ Utilities.makeList(getStringColumn("key"),
+ getStringColumn("value")), -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -260,7 +261,7 @@
OperatorFactory.get(getTestFilterDesc("0"), op4);
Operator<extractDesc> op2 = OperatorFactory.get
- (new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), op3);
+ (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
}
@@ -276,9 +277,9 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
+ (Utilities.makeList(getStringColumn("key")),
Utilities.makeList
- (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), 1, -1));
+ (getStringColumn("value")), Byte.valueOf((byte)0), 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -286,8 +287,8 @@
Operator<reduceSinkDesc> op2 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
+ (Utilities.makeList(getStringColumn("key")),
+ Utilities.makeList(getStringColumn("key")),
Byte.valueOf((byte)1),
Integer.MAX_VALUE, -1));
@@ -302,10 +303,10 @@
Operator<selectDesc> op5 = OperatorFactory.get
(new selectDesc
(Utilities.makeList
- (new exprNodeColumnDesc(String.class, Utilities.ReduceField.ALIAS.toString()),
- new exprNodeFieldDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class),
+ (getStringColumn(Utilities.ReduceField.ALIAS.toString()),
+ new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(
- TypeInfoFactory.getPrimitiveTypeInfo(String.class)),
+ TypeInfoFactory.stringTypeInfo),
Utilities.ReduceField.VALUE.toString()),
"0",
false))), op4);
@@ -321,9 +322,9 @@
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
- new exprNodeColumnDesc(String.class, "tvalue")),
+ (Utilities.makeList(getStringColumn("tkey")),
+ Utilities.makeList(getStringColumn("tkey"),
+ getStringColumn("tvalue")),
-1, 1, -1));
Operator<scriptDesc> op0 = OperatorFactory.get
@@ -333,8 +334,8 @@
op1);
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
- Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
- new exprNodeColumnDesc(String.class, "value"))), op0);
+ Utilities.makeList(getStringColumn("key"),
+ getStringColumn("value"))), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -346,11 +347,15 @@
Utilities.defaultTd, false));
Operator<extractDesc> op2 = OperatorFactory.get
- (new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), op3);
+ (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
}
+ public static exprNodeColumnDesc getStringColumn(String columnName) {
+ return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName);
+ }
+
@SuppressWarnings("unchecked")
private void populateMapRedPlan5(Table src) {
mr.setNumReduceTasks(Integer.valueOf(1));
@@ -359,14 +364,14 @@
Operator<reduceSinkDesc> op0 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "0")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "0"),
- new exprNodeColumnDesc(String.class, "1")),
+ (Utilities.makeList(getStringColumn("0")),
+ Utilities.makeList(getStringColumn("0"),
+ getStringColumn("1")),
-1, 1, -1));
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
- Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
- new exprNodeColumnDesc(String.class, "value"))), op0);
+ Utilities.makeList(getStringColumn("key"),
+ getStringColumn("value"))), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
@@ -378,7 +383,7 @@
Utilities.defaultTd, false));
Operator<extractDesc> op2 = OperatorFactory.get
- (new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), op3);
+ (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
}
@@ -391,9 +396,9 @@
Operator<reduceSinkDesc> op1 = OperatorFactory.get
(PlanUtils.getReduceSinkDesc(
- Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
- Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
- new exprNodeColumnDesc(String.class, "tvalue")),
+ Utilities.makeList(getStringColumn("tkey")),
+ Utilities.makeList(getStringColumn("tkey"),
+ getStringColumn("tvalue")),
-1, 1, -1));
Operator<scriptDesc> op0 = OperatorFactory.get
@@ -403,8 +408,8 @@
op1);
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
- Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
- new exprNodeColumnDesc(String.class, "value"))), op0);
+ Utilities.makeList(getStringColumn("key"),
+ getStringColumn("value"))), op0);
Utilities.addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -419,7 +424,7 @@
OperatorFactory.get(getTestFilterDesc("0"), op3);
Operator<extractDesc> op5 = OperatorFactory.get
- (new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), op2);
+ (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op2);
mr.setReducer(op5);
}
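
A minimal sketch (not part of r764994) of the refactoring this file applies: the per-class lookup TypeInfoFactory.getPrimitiveTypeInfo(String.class) is replaced by the named singleton TypeInfoFactory.stringTypeInfo, and the repeated inline column constructions are funneled through the new getStringColumn helper. The wrapper class name below is hypothetical; the helper body matches the diff.

    import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class StringColumnSketch {
      // Same shape as the helper added above: build column descriptors
      // from the singleton stringTypeInfo instead of the removed
      // getPrimitiveTypeInfo(String.class) lookup.
      public static exprNodeColumnDesc getStringColumn(String columnName) {
        return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName);
      }

      public static void main(String[] args) {
        exprNodeColumnDesc key = getStringColumn("key");
        System.out.println(key.getTypeInfo().getTypeName()); // prints "string"
      }
    }
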
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Tue Apr 14 22:54:39 2009
@@ -22,7 +22,10 @@
import java.io.*;
import java.util.*;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
@@ -35,15 +38,17 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.io.Text;
public class TestExpressionEvaluator extends TestCase {
// this is our row to test expressions on
protected InspectableObject r;
- ArrayList<String> col1;
+ ArrayList<Text> col1;
TypeInfo col1Type;
- ArrayList<String> cola;
+ ArrayList<Text> cola;
TypeInfo colaType;
ArrayList<Object> data;
ArrayList<String> names;
@@ -51,19 +56,19 @@
TypeInfo dataType;
public TestExpressionEvaluator() {
- col1 = new ArrayList<String> ();
- col1.add("0");
- col1.add("1");
- col1.add("2");
- col1.add("3");
+ col1 = new ArrayList<Text> ();
+ col1.add(new Text("0"));
+ col1.add(new Text("1"));
+ col1.add(new Text("2"));
+ col1.add(new Text("3"));
col1Type = TypeInfoFactory.getListTypeInfo(
- TypeInfoFactory.getPrimitiveTypeInfo(String.class));
- cola = new ArrayList<String> ();
- cola.add("a");
- cola.add("b");
- cola.add("c");
+ TypeInfoFactory.stringTypeInfo);
+ cola = new ArrayList<Text> ();
+ cola.add(new Text("a"));
+ cola.add(new Text("b"));
+ cola.add(new Text("c"));
colaType = TypeInfoFactory.getListTypeInfo(
- TypeInfoFactory.getPrimitiveTypeInfo(String.class));
+ TypeInfoFactory.stringTypeInfo);
try {
data = new ArrayList<Object>();
data.add(col1);
@@ -97,7 +102,8 @@
// evaluate on row
InspectableObject result = new InspectableObject();
eval.evaluate(r.o, r.oi, result);
- assertEquals(result.o, cola);
+ Object standardResult = ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.WRITABLE);
+ assertEquals(cola, standardResult);
System.out.println("ExprNodeColumnEvaluator ok");
} catch (Throwable e) {
e.printStackTrace();
@@ -118,7 +124,8 @@
// evaluate on row
InspectableObject result = new InspectableObject();
eval.evaluate(r.o, r.oi, result);
- assertEquals(result.o, "1a");
+ assertEquals("1a",
+ ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.JAVA));
System.out.println("ExprNodeFuncEvaluator ok");
} catch (Throwable e) {
e.printStackTrace();
@@ -131,13 +138,14 @@
// get a evaluator for a string concatenation expression
exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
- exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Double.class.getName(), col11desc);
+ exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
// evaluate on row
InspectableObject result = new InspectableObject();
eval.evaluate(r.o, r.oi, result);
- assertEquals(result.o, Double.valueOf("1"));
+ assertEquals(Double.valueOf("1"),
+ ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.JAVA));
System.out.println("testExprNodeConversionEvaluator ok");
} catch (Throwable e) {
e.printStackTrace();
@@ -145,16 +153,17 @@
}
}
- private static void measureSpeed(String expr, int times, ExprNodeEvaluator eval, InspectableObject input, Object standardOutput) throws HiveException {
+ private static void measureSpeed(String expr, int times, ExprNodeEvaluator eval, InspectableObject input, Object standardJavaOutput) throws HiveException {
System.out.println("Evaluating " + expr + " for " + times + " times");
// evaluate on row
InspectableObject output = new InspectableObject();
long start = System.currentTimeMillis();
for (int i=0; i<times; i++) {
eval.evaluate(input.o, input.oi, output);
- assertEquals(output.o, standardOutput);
}
long end = System.currentTimeMillis();
+ assertEquals(standardJavaOutput,
+ ObjectInspectorUtils.copyToStandardObject(output.o, output.oi, ObjectInspectorCopyOption.JAVA));
System.out.println("Evaluation finished: " + String.format("%2.3f", (end - start)*0.001) + " seconds, "
+ String.format("%2.3f", (end - start)*1000.0/times) + " seconds/million call.");
}
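
A minimal sketch (again, not part of the commit) of the assertion idiom these hunks adopt: evaluator output may now be backed by Hadoop Writables, so it is copied to a standard representation with ObjectInspectorUtils.copyToStandardObject before assertEquals. The writableStringObjectInspector constant is assumed to exist in PrimitiveObjectInspectorFactory alongside the javaStringObjectInspector used elsewhere in this patch.

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;

    public class CopyToStandardSketch {
      public static void main(String[] args) {
        // A Writable-backed result, standing in for result.o / result.oi above.
        Object o = new Text("1a");
        ObjectInspector oi =
            PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        // Copy to the standard Java representation before comparing.
        Object javaCopy = ObjectInspectorUtils.copyToStandardObject(
            o, oi, ObjectInspectorCopyOption.JAVA);
        System.out.println("1a".equals(javaCopy)); // true
      }
    }
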
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Tue Apr 14 22:54:39 2009
@@ -36,8 +36,10 @@
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
public class TestOperators extends TestCase {
@@ -51,9 +53,9 @@
names.add("col1");
names.add("col2");
ArrayList<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>(3);
- objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
- objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
- objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
+ objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+ objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+ objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
for(int i=0; i<5; i++) {
ArrayList<String> data = new ArrayList<String> ();
data.add(""+i);
@@ -72,10 +74,10 @@
public void testBaseFilterOperator() throws Throwable {
try {
System.out.println("Testing Filter Operator");
- exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col0");
- exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col1");
- exprNodeDesc col2 = new exprNodeColumnDesc(String.class, "col2");
- exprNodeDesc zero = new exprNodeConstantDesc(String.class, "0");
+ exprNodeDesc col0 = TestExecDriver.getStringColumn("col0");
+ exprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
+ exprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
+ exprNodeDesc zero = new exprNodeConstantDesc("0");
exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2);
@@ -95,9 +97,9 @@
Map<Enum<?>, Long> results = op.getStats();
System.out.println("filtered = " + results.get(FilterOperator.Counter.FILTERED));
- assertEquals(results.get(FilterOperator.Counter.FILTERED), Long.valueOf(4));
+ assertEquals(Long.valueOf(4), results.get(FilterOperator.Counter.FILTERED));
System.out.println("passed = " + results.get(FilterOperator.Counter.PASSED));
- assertEquals(results.get(FilterOperator.Counter.PASSED), Long.valueOf(1));
+ assertEquals(Long.valueOf(1), results.get(FilterOperator.Counter.PASSED));
/*
for(Enum e: results.keySet()) {
@@ -116,12 +118,11 @@
try {
System.out.println("Testing FileSink Operator");
// col1
- exprNodeDesc exprDesc1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class),
- "col1");
+ exprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
// col2
ArrayList<exprNodeDesc> exprDesc2children = new ArrayList<exprNodeDesc>();
- exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
+ exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
exprNodeDesc expr2 = new exprNodeConstantDesc("1");
exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
@@ -163,10 +164,10 @@
try {
System.out.println("Testing Script Operator");
// col1
- exprNodeDesc exprDesc1 = new exprNodeColumnDesc(String.class, "col1");
+ exprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
// col2
- exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
+ exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
exprNodeDesc expr2 = new exprNodeConstantDesc("1");
exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
@@ -219,8 +220,10 @@
assert(soi != null);
StructField a = soi.getStructFieldRef("a");
StructField b = soi.getStructFieldRef("b");
- assertEquals(""+(i+1), soi.getStructFieldData(io.o, a));
- assertEquals((i) + "1", soi.getStructFieldData(io.o, b));
+ assertEquals(""+(i+1), ((PrimitiveObjectInspector)a.getFieldObjectInspector())
+ .getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
+ assertEquals((i) + "1", ((PrimitiveObjectInspector)b.getFieldObjectInspector())
+ .getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
}
System.out.println("Script Operator ok");
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Tue Apr 14 22:54:39 2009
@@ -43,8 +43,8 @@
try {
// initialize a complete map reduce configuration
- exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F1);
- exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F2);
+ exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1);
+ exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2);
exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2);
filterDesc filterCtx = new filterDesc(filterExpr);
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Tue Apr 14 22:54:39 2009
@@ -19,12 +19,20 @@
package org.apache.hadoop.hive.ql.udf;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
/**
* A UDF for testing, which evaluates the length of a string.
*/
public class UDFTestLength extends UDF {
- public Integer evaluate(String s) {
- return s == null ? null : s.length();
+
+ IntWritable result = new IntWritable();
+ public IntWritable evaluate(Text s) {
+ if (s == null) {
+ return null;
+ }
+ result.set(s.toString().length());
+ return result;
}
}
\ No newline at end of file
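
Worth noting about the rewritten UDF above: it reuses a single IntWritable across calls to avoid per-row allocation, so callers must copy the value if they need to retain it. A small illustrative driver (hypothetical class name, plain Java, no Hive runtime needed):

    import org.apache.hadoop.hive.ql.udf.UDFTestLength;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    public class UDFTestLengthDemo {
      public static void main(String[] args) {
        UDFTestLength udf = new UDFTestLength();
        IntWritable first = udf.evaluate(new Text("seven77"));
        System.out.println(first.get());  // 7
        IntWritable second = udf.evaluate(new Text("six66"));
        // Both calls return the same reused IntWritable instance,
        // so first.get() has also become 5.
        System.out.println(first == second); // true
        System.out.println(first.get());     // 5
      }
    }
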
Added: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=764994&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (added)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Tue Apr 14 22:54:39 2009
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+
+/**
+ * A UDF for testing, which evaluates the length of a string.
+ * This UDF uses Java Primitive classes for parameters.
+ */
+public class UDFTestLength2 extends UDF {
+
+ public Integer evaluate(String s) {
+ if (s == null) {
+ return null;
+ }
+ return Integer.valueOf(s.length());
+ }
+}
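
By contrast, UDFTestLength2 keeps the Java-primitive signature and allocates a fresh Integer per call, so its results are independent and safe to retain. Another small hypothetical driver:

    import org.apache.hadoop.hive.ql.udf.UDFTestLength2;

    public class UDFTestLength2Demo {
      public static void main(String[] args) {
        UDFTestLength2 udf = new UDFTestLength2();
        // A fresh Integer per call: results are independent.
        System.out.println(udf.evaluate("seven77")); // 7
        System.out.println(udf.evaluate(null));      // null
      }
    }
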
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_testlength2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_testlength2.q?rev=764994&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_testlength2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_testlength2.q Tue Apr 14 22:54:39 2009
@@ -0,0 +1,10 @@
+EXPLAIN
+CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2';
+
+CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2';
+
+CREATE TABLE dest1(len INT);
+
+FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength2(src.value);
+
+SELECT dest1.* FROM dest1;
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out Tue Apr 14 22:54:39 2009
@@ -27,7 +27,7 @@
Select Operator
expressions:
expr: 0
- type: Void
+ type: void
expr: UDFToInteger(1)
type: int
File Output Operator
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out?rev=764994&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out Tue Apr 14 22:54:39 2009
@@ -0,0 +1,510 @@
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATEFUNCTION testlength2 'org.apache.hadoop.hive.ql.udf.UDFTestLength2')
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+
+
+7
+6
+7
+6
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+5
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+7
+6
+6
+5
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+6
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+6
+5
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+7
+7
+7
+7
+5
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+6
+7
+7
+7
+7
+7
+7
+6
+7
+6
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+6
+5
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+6
+6
+7
+6
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+5
+7
+7
+7
+7
+7
+6
+6
+7
+6
+6
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+6
+6
+6
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+6
+7
+6
+6
+7
+6
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+5
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+5
+6
+7
+7
+7
+6
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+5
+6
+7
+7
+7
+6
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+6
+7
+7
+6
+7
+7
+7
+7
+5
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
+7
+7
+6
+6
+6
+7
+7
+7
+7
+7
+7
+7
+7
+7
+6
+7
+7
+7
+7
+6
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out Tue Apr 14 22:54:39 2009
@@ -1,4 +1,4 @@
2009-03-20 11:30:01 1237573801
2009-03-20 1237532400
2009 Mar 20 11:30:01 am 1237573801
-random_string 0
+random_string NULL
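
This .q.out change reflects a behavior fix: an unparseable timestamp string now evaluates to NULL instead of 0. A hypothetical sketch of the pattern (this is not the committed UDFUnixTimeStamp source; the class name and date format below are illustrative only):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;

    public class UDFToUnixTimeSketch extends UDF {
      private final SimpleDateFormat formatter =
          new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

      public LongWritable evaluate(Text dateText) {
        if (dateText == null) {
          return null;
        }
        try {
          long millis = formatter.parse(dateText.toString()).getTime();
          return new LongWritable(millis / 1000);
        } catch (ParseException e) {
          return null; // previously an unparseable input produced 0
        }
      }
    }
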
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param1.q.out?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param1.q.out Tue Apr 14 22:54:39 2009
@@ -1,2 +1,2 @@
Semantic Exception:
-line 2:36 Function Argument Type Mismatch concat: Looking for UDF "concat" with parameters [class java.lang.String, class java.lang.String, class java.lang.String]
\ No newline at end of file
+line 2:36 Function Argument Type Mismatch concat: Looking for UDF "concat" with parameters [class org.apache.hadoop.io.Text, class org.apache.hadoop.io.Text, class org.apache.hadoop.io.Text]
\ No newline at end of file
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param2.q.out?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param2.q.out Tue Apr 14 22:54:39 2009
@@ -1,2 +1,2 @@
Semantic Exception:
-line 2:36 Function Argument Type Mismatch substr: Looking for UDF "substr" with parameters [class java.lang.String, class java.lang.String]
\ No newline at end of file
+line 2:36 Function Argument Type Mismatch substr: Looking for UDF "substr" with parameters [class org.apache.hadoop.io.Text, class org.apache.hadoop.io.Text]
\ No newline at end of file
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/errors/unknown_function5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/errors/unknown_function5.q.out?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/errors/unknown_function5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/errors/unknown_function5.q.out Tue Apr 14 22:54:39 2009
@@ -1,2 +1,2 @@
Semantic Exception:
-line 2:44 Function Argument Type Mismatch concat: Looking for UDF "concat" with parameters [class java.lang.String]
\ No newline at end of file
+line 2:44 Function Argument Type Mismatch concat: Looking for UDF "concat" with parameters [class org.apache.hadoop.io.Text]
\ No newline at end of file
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="childTasks">
<object class="java.util.ArrayList">
@@ -31,7 +31,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/406061868.10000.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/177607877.10000.insclause-0</string>
</void>
<void property="table">
<object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -84,7 +84,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/dest1</string>
</void>
</object>
</void>
@@ -135,7 +135,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/406061868.10000.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/177607877.10000.insclause-0</string>
</void>
<void property="tableInfo">
<object idref="tableDesc0"/>
@@ -163,8 +163,8 @@
</void>
<void property="type">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -177,8 +177,8 @@
</void>
<void property="type">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -320,10 +320,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -371,8 +371,8 @@
</void>
<void property="typeInfo">
<object class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Boolean</class>
+ <void property="typeName">
+ <string>boolean</string>
</void>
</object>
</void>
@@ -560,7 +560,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src_thrift</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src_thrift</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src_thrift</string>
@@ -572,7 +572,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src_thrift</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src_thrift</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -632,7 +632,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src_thrift</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src_thrift</string>
</void>
</object>
</void>
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="id">
<string>Stage-2</string>
@@ -30,7 +30,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/1243941799.10001.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/17943362.10001.insclause-0</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -80,8 +80,8 @@
</void>
<void property="type">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -94,8 +94,8 @@
</void>
<void property="type">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Double</class>
+ <void property="typeName">
+ <string>double</string>
</void>
</object>
</void>
@@ -138,8 +138,8 @@
</void>
<void property="type">
<object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Boolean</class>
+ <void property="typeName">
+ <string>boolean</string>
</void>
</object>
</void>
@@ -176,10 +176,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
<void index="1">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -224,10 +224,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
<void index="1">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -244,7 +244,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -296,10 +296,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
<void index="1">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -326,7 +326,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -368,10 +368,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -416,7 +416,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Boolean</class>
+ <class>org.apache.hadoop.io.BooleanWritable</class>
</void>
</array>
</object>
@@ -451,7 +451,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -486,10 +486,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -516,7 +516,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -611,10 +611,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
<void index="1">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -631,7 +631,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -645,8 +645,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo3" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -669,7 +669,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -811,7 +811,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src</string>
@@ -823,7 +823,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -879,7 +879,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
</void>
</object>
</void>
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="childTasks">
<object class="java.util.ArrayList">
@@ -31,7 +31,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/209369405/364337103.10000.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/146879216/330242162.10000.insclause-0</string>
</void>
<void property="table">
<object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -84,7 +84,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/dest1</string>
</void>
</object>
</void>
@@ -131,8 +131,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -200,8 +200,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Double</class>
+ <void property="typeName">
+ <string>double</string>
</void>
</object>
</void>
@@ -307,7 +307,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -321,7 +321,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -338,10 +338,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -362,8 +362,8 @@
<object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">
<void property="typeInfo">
<object class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -499,7 +499,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src</string>
@@ -511,7 +511,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -567,7 +567,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
</void>
</object>
</void>
@@ -596,7 +596,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/209369405/364337103.10000.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/146879216/330242162.10000.insclause-0</string>
</void>
<void property="tableInfo">
<object idref="tableDesc0"/>
@@ -735,7 +735,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="id">
<string>Stage-2</string>
@@ -29,8 +29,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -116,8 +116,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Long</class>
+ <void property="typeName">
+ <string>bigint</string>
</void>
</object>
</void>
@@ -130,8 +130,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Double</class>
+ <void property="typeName">
+ <string>double</string>
</void>
</object>
</void>
@@ -241,10 +241,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -265,8 +265,8 @@
<object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">
<void property="typeInfo">
<object id="PrimitiveTypeInfo3" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -293,13 +293,13 @@
<string>evaluate</string>
<array class="java.lang.Class" length="3">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
<void index="2">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -362,7 +362,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFCount</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFCount$UDAFCountEvaluator</class>
</void>
<void property="distinct">
<boolean>true</boolean>
@@ -379,10 +379,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -423,7 +423,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -437,7 +437,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -454,10 +454,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -637,7 +637,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src</string>
@@ -649,7 +649,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -705,7 +705,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
</void>
</object>
</void>
@@ -731,7 +731,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/847298005/67909601.10001.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/182085494/837559448.10001.insclause-0</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -825,10 +825,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -855,7 +855,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.Double</class>
+ <class>org.apache.hadoop.hive.serde2.io.DoubleWritable</class>
</void>
</array>
</object>
@@ -980,7 +980,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFCount</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFCount$UDAFCountEvaluator</class>
</void>
<void property="distinct">
<boolean>true</boolean>
@@ -1004,7 +1004,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="id">
<string>Stage-2</string>
@@ -33,8 +33,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -102,8 +102,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Double</class>
+ <void property="typeName">
+ <string>double</string>
</void>
</object>
</void>
@@ -273,10 +273,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -297,8 +297,8 @@
<object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">
<void property="typeInfo">
<object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -342,7 +342,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -356,7 +356,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -373,10 +373,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -424,7 +424,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFAvg</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFAvg$UDAFAvgEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -438,7 +438,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -455,10 +455,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -506,7 +506,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFAvg</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFAvg$UDAFAvgEvaluator</class>
</void>
<void property="distinct">
<boolean>true</boolean>
@@ -523,7 +523,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
@@ -540,10 +540,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -605,10 +605,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -663,10 +663,10 @@
<string>evaluate</string>
<array class="java.lang.Class" length="2">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -915,7 +915,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src</string>
@@ -927,7 +927,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -983,7 +983,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
</void>
</object>
</void>
@@ -1009,7 +1009,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/110442086.10001.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/906910226.10001.insclause-0</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -1242,7 +1242,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFSum</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFSum$UDAFSumEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -1263,7 +1263,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFAvg</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFAvg$UDAFAvgEvaluator</class>
</void>
<void property="parameters">
<object class="java.util.ArrayList">
@@ -1284,7 +1284,7 @@
<void method="add">
<object class="org.apache.hadoop.hive.ql.plan.aggregationDesc">
<void property="aggregationClass">
- <class>org.apache.hadoop.hive.ql.udf.UDAFAvg</class>
+ <class>org.apache.hadoop.hive.ql.udf.UDAFAvg$UDAFAvgEvaluator</class>
</void>
<void property="distinct">
<boolean>true</boolean>
@@ -1301,7 +1301,7 @@
<string>evaluate</string>
<array class="java.lang.Class" length="1">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
</array>
</object>
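(Editorial aside: the other pattern running through the groupby3 hunks is that every serialized evaluate() signature swaps java.lang.String, java.lang.Integer, and java.lang.Double for org.apache.hadoop.io.Text, org.apache.hadoop.io.IntWritable, and org.apache.hadoop.hive.serde2.io.DoubleWritable. A minimal sketch of a UDF with the (Text, IntWritable) arity those plans record follows; the class and its body are hypothetical, named here only to illustrate the signature change.)

package org.apache.hadoop.hive.ql.udf;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

// Hypothetical UDF: takes the last n characters of a string, reusing one
// Text instance for the result as Writable-based UDFs typically do.
public class UDFTailSketch extends UDF {
  private final Text result = new Text();

  // The (Text, IntWritable) parameter list matches the two-argument
  // evaluate() entries serialized in the plans above.
  public Text evaluate(Text s, IntWritable n) {
    if (s == null || n == null) {
      return null;
    }
    String str = s.toString();
    // Clamp the start index so negative or oversized n stays in bounds.
    int start = Math.min(Math.max(str.length() - n.get(), 0), str.length());
    result.set(str.substring(start));
    return result;
  }
}
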
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder">
+<java version="1.6.0_07" class="java.beans.XMLDecoder">
<object class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="id">
<string>Stage-2</string>
@@ -33,8 +33,8 @@
</void>
<void property="typeInfo">
<object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.String</class>
+ <void property="typeName">
+ <string>string</string>
</void>
</object>
</void>
@@ -168,13 +168,13 @@
<string>evaluate</string>
<array class="java.lang.Class" length="3">
<void index="0">
- <class>java.lang.String</class>
+ <class>org.apache.hadoop.io.Text</class>
</void>
<void index="1">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
<void index="2">
- <class>java.lang.Integer</class>
+ <class>org.apache.hadoop.io.IntWritable</class>
</void>
</array>
</object>
@@ -195,8 +195,8 @@
<object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">
<void property="typeInfo">
<object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="primitiveClass">
- <class>java.lang.Integer</class>
+ <void property="typeName">
+ <string>int</string>
</void>
</object>
</void>
@@ -375,7 +375,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>src</string>
@@ -387,7 +387,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="partSpec">
<object class="java.util.LinkedHashMap"/>
@@ -443,7 +443,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src</string>
+ <string>file:/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/build/ql/test/data/warehouse/src</string>
</void>
</object>
</void>
@@ -469,7 +469,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/189021992.10001.insclause-0</string>
+ <string>/data/users/zshao/sync/apache-trunk-HIVE-266-trunk/ql/../build/ql/tmp/387461473.10001.insclause-0</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
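
(Editorial aside: as the headers in these diffs declare, each plan file is a java.beans.XMLDecoder serialization, which is why the JDK version string and the PrimitiveTypeInfo typeName/primitiveClass property both churn when the files are regenerated. A minimal sketch of reading one plan back follows, assuming the Hive classes named in the XML are on the classpath; the file path is illustrative.)

import java.beans.XMLDecoder;
import java.io.BufferedInputStream;
import java.io.FileInputStream;

public class PlanDecodeSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative path; substitute any of the regenerated plan files.
    String path = "ql/src/test/results/compiler/plan/groupby4.q.xml";
    XMLDecoder decoder = new XMLDecoder(
        new BufferedInputStream(new FileInputStream(path)));
    try {
      // The root element in each plan is a MapRedTask, per the <object>
      // declaration directly under <java> in the hunks above.
      Object task = decoder.readObject();
      System.out.println(task.getClass().getName());
    } finally {
      decoder.close();
    }
  }
}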