svn commit: r764994 [1/12] - in /hadoop/hive/trunk: ./ data/conf/ eclipse-templates/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/ ql/lib/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/...
Author: namit
Date: Tue Apr 14 22:54:39 2009
New Revision: 764994
URL: http://svn.apache.org/viewvc?rev=764994&view=rev
Log:
HIVE-266. Use Text instead of String (Zheng Shao via namit)
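
The gist of HIVE-266: expression evaluation now passes Hadoop Writable objects (Text, IntWritable, and the new serde2.io wrappers added below) between operators and UDFs instead of java.lang.String and boxed primitives, so a single mutable buffer can be reused across rows. A minimal sketch of the pattern this enables (illustrative only, not code from this commit):

    import org.apache.hadoop.io.Text;

    public class ToUpperSketch {
      // One reusable output buffer; the framework calls evaluate() once per row.
      private final Text result = new Text();

      public Text evaluate(Text input) {
        if (input == null) {
          return null;
        }
        result.set(input.toString().toUpperCase());
        return result;  // same instance every call; callers must copy to retain it
      }
    }

Because evaluate() hands back the same Text instance on every call, anything that needs to keep a value across rows must copy it, which is exactly why CollectOperator below switches to copyToStandardObject.
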
Added:
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_testlength2.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/ByteWritable.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/ShortWritable.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveJavaObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveWritableObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/BooleanObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/ByteObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DoubleObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/FloatObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/IntObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBooleanObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaByteObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDoubleObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaFloatObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIntObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaLongObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaShortObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaStringObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaVoidObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/LongObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/ShortObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/StringObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/VoidObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableBooleanObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableByteObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDoubleObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableFloatObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIntObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableLongObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableShortObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableStringObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableVoidObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/WriteTextProtocol.java
Removed:
hadoop/hive/trunk/ql/lib/commons-jexl-1.1.LICENSE
hadoop/hive/trunk/ql/lib/commons-jexl-1.1.jar
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardPrimitiveObjectInspector.java
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/data/conf/hive-log4j.properties
hadoop/hive/trunk/eclipse-templates/.classpath
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hadoop/hive/trunk/ql/build.xml
hadoop/hive/trunk/ql/lib/README
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAFEvaluatorResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFMethodResolver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFIf.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqual.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqualOrGreaterThan.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqualOrLessThan.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPGreaterThan.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPLessThan.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotEqual.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param1.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_function_param2.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/errors/unknown_function5.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
hadoop/hive/trunk/serde/if/serde.thrift
hadoop/hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java
hadoop/hive/trunk/serde/src/gen-php/serde_constants.php
hadoop/hive/trunk/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyByte.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyShort.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ListObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/MapObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/MetadataListStructObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/ListTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/MapTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/PrimitiveTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Apr 14 22:54:39 2009
@@ -16,6 +16,8 @@
HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).
+ HIVE-266. Use Text instead of String (Zheng Shao via namit).
+
BUG FIXES
HIVE-381. Fix JDBC HiveResultSet's next function.
Modified: hadoop/hive/trunk/data/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/conf/hive-log4j.properties?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/data/conf/hive-log4j.properties (original)
+++ hadoop/hive/trunk/data/conf/hive-log4j.properties Tue Apr 14 22:54:39 2009
@@ -1,5 +1,5 @@
# Define some default values that can be overridden by system properties
-hive.root.logger=WARN,DRFA
+hive.root.logger=DEBUG,DRFA
hive.log.dir=${user.dir}/../build/ql/tmp/
hive.log.file=hive.log
Modified: hadoop/hive/trunk/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/eclipse-templates/.classpath?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/eclipse-templates/.classpath (original)
+++ hadoop/hive/trunk/eclipse-templates/.classpath Tue Apr 14 22:54:39 2009
@@ -21,7 +21,6 @@
<classpathentry exported="true" kind="lib" path="lib/log4j-1.2.15.jar"/>
<classpathentry exported="true" kind="lib" path="ql/lib/antlr-3.0.1.jar"/>
<classpathentry exported="true" kind="lib" path="ql/lib/antlr-runtime-3.0.1.jar"/>
- <classpathentry exported="true" kind="lib" path="ql/lib/commons-jexl-1.1.jar"/>
<classpathentry exported="true" kind="lib" path="testlibs/junit-3.8.1.jar"/>
<classpathentry kind="src" path="build/ql/gen-java"/>
<classpathentry kind="src" path="build/ql/test/src"/>
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Tue Apr 14 22:54:39 2009
@@ -178,6 +178,8 @@
Deserializer deserializer = SerDeUtils.lookupDeserializer(lib);
deserializer.initialize(conf, MetaStoreUtils.getSchema(table));
return deserializer;
+ } catch (RuntimeException e) {
+ throw e;
} catch (Exception e) {
LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage());
MetaStoreUtils.printStackTrace(e);
@@ -595,7 +597,7 @@
*/
public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName, TypeInfo typeInfo) {
return new FieldSchema(
- fieldName, TypeInfoUtils.getTypeStringFromTypeInfo(typeInfo), "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo"
+ fieldName, typeInfo.getTypeName(), "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo"
);
}
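
The initSerDe change above adds a rethrow of RuntimeException ahead of the catch-all, so programming errors propagate instead of being logged and converted into a null deserializer. A self-contained sketch of the pattern, with illustrative names:

    import java.util.concurrent.Callable;

    public class RethrowSketch {
      static Object tryInit(Callable<Object> init) {
        try {
          return init.call();
        } catch (RuntimeException e) {
          throw e;  // unchecked: a bug, do not swallow it
        } catch (Exception e) {
          // checked: log and degrade, as the original code did for all exceptions
          System.err.println("error in init: " + e.getClass().getName() + " " + e.getMessage());
          return null;
        }
      }
    }
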
Modified: hadoop/hive/trunk/ql/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/build.xml?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/build.xml (original)
+++ hadoop/hive/trunk/ql/build.xml Tue Apr 14 22:54:39 2009
@@ -142,12 +142,6 @@
<target name="jar" depends="compile">
<echo message="Jar: ${name}"/>
- <unzip src="lib/commons-jexl-1.1.jar" dest="${build.dir.hive}/jexl/classes">
- <patternset>
- <exclude name="META-INF"/>
- <exclude name="META-INF/MANIFEST.MF"/>
- </patternset>
- </unzip>
<unzip src="${hive.root}/lib/libthrift.jar" dest="${build.dir.hive}/thrift/classes">
<patternset>
<exclude name="META-INF"/>
@@ -171,7 +165,6 @@
<fileset dir="${build.dir.hive}/common/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/ql/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/serde/classes" includes="**/*.class"/>
- <fileset dir="${build.dir.hive}/jexl/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/thrift/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
<fileset dir="${build.dir.hive}/json/classes" includes="**/*.class"/>
Modified: hadoop/hive/trunk/ql/lib/README
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/lib/README?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/lib/README (original)
+++ hadoop/hive/trunk/ql/lib/README Tue Apr 14 22:54:39 2009
@@ -2,7 +2,6 @@
---------------------------------------------------------------------------------------------------------------
* stringtemplate-3.1b1.jar - http://www.stringtemplate.org/download.html
-* commons-jexl-1.1.jar - http://commons.apache.org/downloads/download_jexl.cgi
* antlr-2.7.7.jar - http://www.antlr.org/download.html
* antlr-3.0.1.jar - http://www.antlr.org/download.html
* antlr-runtime-3.0.1.jar - http://www.antlr.org/download.html
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java Tue Apr 14 22:54:39 2009
@@ -20,6 +20,8 @@
import java.util.List;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
/**
* Exception thrown by the UDF and UDAF method resolvers in case a unique method is not found.
*
@@ -39,24 +41,24 @@
/**
* The list of parameter types.
*/
- List<Class<?>> argClasses;
+ List<TypeInfo> argTypeInfos;
/**
* Constructor.
*
* @param funcClass The UDF or UDAF class.
- * @param argClasses The list of argument types that lead to an ambiguity.
+ * @param argTypeInfos The list of argument types that lead to an ambiguity.
*/
- public AmbiguousMethodException(Class<?> funcClass, List<Class<?>> argClasses) {
+ public AmbiguousMethodException(Class<?> funcClass, List<TypeInfo> argTypeInfos) {
this.funcClass = funcClass;
- this.argClasses = argClasses;
+ this.argTypeInfos = argTypeInfos;
}
Class<?> getFunctionClass() {
return funcClass;
}
- List<Class<?>> getArgTypeList() {
- return argClasses;
+ List<TypeInfo> getArgTypeList() {
+ return argTypeInfos;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java Tue Apr 14 22:54:39 2009
@@ -52,7 +52,7 @@
// Create a standard copy of the object.
// In the future we can optimize this by doing copy-on-write.
// Here we always copy the object so that other operators can reuse the object for the next row.
- Object o = ObjectInspectorUtils.getStandardObject(row, rowInspector);
+ Object o = ObjectInspectorUtils.copyToStandardObject(row, rowInspector);
ObjectInspector oi = ObjectInspectorUtils.getStandardObjectInspector(rowInspector);
rowList.add(o);
rowInspectorList.add(oi);
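
The rename to copyToStandardObject matches the comment above it: because upstream operators reuse one mutable row object, storing the reference alone would make every collected row alias the most recent input. A small hypothetical illustration, using Text as the reused buffer:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.io.Text;

    public class CopyOnCollectSketch {
      public static void main(String[] args) {
        Text reusedRow = new Text();          // one buffer, mutated per "row"
        List<Object> aliased = new ArrayList<Object>();
        List<Object> copied = new ArrayList<Object>();
        for (String s : new String[] { "a", "b" }) {
          reusedRow.set(s);
          aliased.add(reusedRow);             // stores the same object twice
          copied.add(new Text(reusedRow));    // standard copy, like copyToStandardObject
        }
        System.out.println(aliased);          // [b, b] -- both entries alias the buffer
        System.out.println(copied);           // [a, b]
      }
    }
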
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java Tue Apr 14 22:54:39 2009
@@ -55,7 +55,7 @@
public ColumnInfo(String internalName, Class type) {
this.internalName = internalName;
- this.type = TypeInfoFactory.getPrimitiveTypeInfo(type);
+ this.type = TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(type);
}
public TypeInfo getType() {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java Tue Apr 14 22:54:39 2009
@@ -24,6 +24,9 @@
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
/**
* The class implements the method resolution for overloaded comparison operators. The
@@ -51,30 +54,24 @@
* @see org.apache.hadoop.hive.ql.exec.UDFMethodResolver#getEvalMethod(java.util.List)
*/
@Override
- public Method getEvalMethod(List<Class<?>> argClasses)
+ public Method getEvalMethod(List<TypeInfo> argTypeInfos)
throws AmbiguousMethodException {
- assert(argClasses.size() == 2);
+ assert(argTypeInfos.size() == 2);
- List<Class<?>> pClasses = null;
- if (argClasses.get(0) == Void.class ||
- argClasses.get(1) == Void.class) {
- pClasses = new ArrayList<Class<?>>();
- pClasses.add(Double.class);
- pClasses.add(Double.class);
+ List<TypeInfo> pTypeInfos = null;
+ if (argTypeInfos.get(0).equals(TypeInfoFactory.voidTypeInfo) ||
+ argTypeInfos.get(1).equals(TypeInfoFactory.voidTypeInfo)) {
+ pTypeInfos = new ArrayList<TypeInfo>();
+ pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
+ pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
}
- else if (argClasses.get(0) == argClasses.get(1)) {
- pClasses = argClasses;
- }
- else if (argClasses.get(0) == java.sql.Date.class ||
- argClasses.get(1) == java.sql.Date.class) {
- pClasses = new ArrayList<Class<?>>();
- pClasses.add(java.sql.Date.class);
- pClasses.add(java.sql.Date.class);
+ else if (argTypeInfos.get(0) == argTypeInfos.get(1)) {
+ pTypeInfos = argTypeInfos;
}
else {
- pClasses = new ArrayList<Class<?>>();
- pClasses.add(Double.class);
- pClasses.add(Double.class);
+ pTypeInfos = new ArrayList<TypeInfo>();
+ pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
+ pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
}
Method udfMethod = null;
@@ -82,20 +79,20 @@
for(Method m: Arrays.asList(udfClass.getMethods())) {
if (m.getName().equals("evaluate")) {
- Class<?>[] argumentTypeInfos = m.getParameterTypes();
+ List<TypeInfo> acceptedTypeInfos = TypeInfoUtils.getParameterTypeInfos(m);
- boolean match = (argumentTypeInfos.length == pClasses.size());
+ boolean match = (acceptedTypeInfos.size() == pTypeInfos.size());
- for(int i=0; i<pClasses.size() && match; i++) {
- Class<?> accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]);
- if (accepted != pClasses.get(i)) {
+ for(int i=0; i<pTypeInfos.size() && match; i++) {
+ TypeInfo accepted = acceptedTypeInfos.get(i);
+ if (accepted != pTypeInfos.get(i)) {
match = false;
}
}
if (match) {
if (udfMethod != null) {
- throw new AmbiguousMethodException(udfClass, argClasses);
+ throw new AmbiguousMethodException(udfClass, argTypeInfos);
}
else {
udfMethod = m;
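
The resolver above encodes a simple rule: if either operand is void (a null constant), or the two operand types differ, compare as doubles; only identical types are compared directly. A standalone sketch of that rule, with plain type-name strings standing in for TypeInfo:

    import java.util.Arrays;
    import java.util.List;

    public class CompareTypeRuleSketch {
      static List<String> resolve(String a, String b) {
        if (a.equals("void") || b.equals("void")) {
          return Arrays.asList("double", "double");  // null compares as double
        } else if (a.equals(b)) {
          return Arrays.asList(a, b);                // same type: compare directly
        } else {
          return Arrays.asList("double", "double");  // mixed types: fall back to double
        }
      }

      public static void main(String[] args) {
        System.out.println(resolve("int", "int"));     // [int, int]
        System.out.println(resolve("int", "string"));  // [double, double]
        System.out.println(resolve("void", "int"));    // [double, double]
      }
    }
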
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java Tue Apr 14 22:54:39 2009
@@ -22,6 +22,8 @@
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
/**
* The default UDAF Method resolver. This resolver is used for resolving the UDAF methods that are
used for partial and final evaluation given the list of the argument types. The getEvalMethod goes through all the
@@ -50,19 +52,11 @@
*
* @param argClasses The list of the parameter types.
*/
- public Class<? extends UDAFEvaluator> getEvaluatorClass(List<Class<?>> argClasses)
+ public Class<? extends UDAFEvaluator> getEvaluatorClass(List<TypeInfo> argClasses)
throws AmbiguousMethodException {
ArrayList<Class<? extends UDAFEvaluator>> classList = new ArrayList<Class<? extends UDAFEvaluator>>();
- // Add the udaf class if it implements and evaluator
- for(Class<?> iface: udafClass.getInterfaces()) {
- if (iface == UDAFEvaluator.class) {
- Class<? extends UDAFEvaluator> udafClass2 = (Class<? extends UDAFEvaluator>) udafClass;
- classList.add(udafClass2);
- }
- }
-
// Add all the public member classes that implement an evaluator
for(Class<?> enclClass: udafClass.getClasses()) {
for(Class<?> iface: enclClass.getInterfaces()) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java Tue Apr 14 22:54:39 2009
@@ -21,6 +21,8 @@
import java.lang.reflect.Method;
import java.util.List;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
/**
* The default UDF Method resolver. This resolver is used for resolving the UDF method that is to be
* used for evaluation given the list of the argument types. The getEvalMethod goes through all the
@@ -50,7 +52,8 @@
* @param argClasses The list of the argument types that need to matched with the evaluate
* function signature.
*/
- public Method getEvalMethod(List<Class<?>> argClasses)
+ @Override
+ public Method getEvalMethod(List<TypeInfo> argClasses)
throws AmbiguousMethodException {
Method m = FunctionRegistry.getMethodInternal(udfClass, "evaluate", false, argClasses);
if (m == null) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java Tue Apr 14 22:54:39 2009
@@ -34,8 +34,10 @@
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
@@ -85,7 +87,7 @@
ArrayList<ObjectInspector> ois = new ArrayList<ObjectInspector>();
ois.add(keyObjectInspector);
ois.add(valueObjectInspector[tag]);
- ois.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(Byte.class));
+ ois.add(PrimitiveObjectInspectorFactory.writableByteObjectInspector);
rowObjectInspector[tag] = ObjectInspectorFactory.getStandardStructObjectInspector(
Arrays.asList(fieldNames), ois);
}
@@ -103,6 +105,7 @@
private BytesWritable groupKey;
ArrayList<Object> row = new ArrayList<Object>(3);
+ ByteWritable tag = new ByteWritable();
public void reduce(Object key, Iterator values,
OutputCollector output,
Reporter reporter) throws IOException {
@@ -122,11 +125,11 @@
try {
BytesWritable keyWritable = (BytesWritable)key;
- byte tag = 0;
+ tag.set((byte)0);
if (isTagged) {
// remove the tag
int size = keyWritable.getSize() - 1;
- tag = keyWritable.get()[size];
+ tag.set(keyWritable.get()[size]);
keyWritable.setSize(size);
}
@@ -153,15 +156,15 @@
Writable valueWritable = (Writable) values.next();
//System.err.print(who.getHo().toString());
try {
- valueObject[tag] = inputValueDeserializer[tag].deserialize(valueWritable);
+ valueObject[tag.get()] = inputValueDeserializer[tag.get()].deserialize(valueWritable);
} catch (SerDeException e) {
throw new HiveException(e);
}
row.clear();
row.add(keyObject);
- row.add(valueObject[tag]);
+ row.add(valueObject[tag.get()]);
row.add(tag);
- reducer.process(row, rowObjectInspector[tag]);
+ reducer.process(row, rowObjectInspector[tag.get()]);
}
} catch (HiveException e) {
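
Replacing the local byte tag with a ByteWritable field follows the same reuse discipline: one mutable wrapper is set per row and placed into the reused row list, avoiding a boxed Byte per record. A sketch with illustrative names (ByteWritable is the Hive serde2.io class added by this commit):

    import org.apache.hadoop.hive.serde2.io.ByteWritable;

    public class TagReuseSketch {
      private final ByteWritable tag = new ByteWritable();  // reused across rows

      void readTag(byte[] keyBytes, boolean isTagged) {
        // When the plan is tagged, the tag rides in the last byte of the key.
        tag.set(isTagged ? keyBytes[keyBytes.length - 1] : (byte) 0);
        // ... row.add(tag); downstream code reads tag.get()
      }
    }
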
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java Tue Apr 14 22:54:39 2009
@@ -22,23 +22,26 @@
import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
public class ExprNodeConstantEvaluator extends ExprNodeEvaluator {
protected exprNodeConstantDesc expr;
transient ObjectInspector objectInspector;
+ transient Object value;
public ExprNodeConstantEvaluator(exprNodeConstantDesc expr) {
this.expr = expr;
- objectInspector = ObjectInspectorFactory.getStandardPrimitiveObjectInspector(expr.getTypeInfo().getPrimitiveClass());
+ objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+ ((PrimitiveTypeInfo)expr.getTypeInfo()).getPrimitiveCategory());
+ value = expr.getValue();
}
public void evaluate(Object row, ObjectInspector rowInspector,
InspectableObject result) throws HiveException {
assert(result != null);
- result.o = expr.getValue();
+ result.o = value;
result.oi = objectInspector;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java Tue Apr 14 22:54:39 2009
@@ -29,7 +29,11 @@
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.util.ReflectionUtils;
public class ExprNodeFuncEvaluator extends ExprNodeEvaluator {
@@ -39,6 +43,7 @@
protected exprNodeFuncDesc expr;
transient ExprNodeEvaluator[] paramEvaluators;
transient InspectableObject[] paramInspectableObjects;
+ transient boolean[] paramIsPrimitiveWritable;
transient Object[] paramValues;
transient UDF udf;
transient Method udfMethod;
@@ -55,13 +60,21 @@
int paramNumber = expr.getChildren().size();
paramEvaluators = new ExprNodeEvaluator[paramNumber];
paramInspectableObjects = new InspectableObject[paramNumber];
+ paramIsPrimitiveWritable = new boolean[paramNumber];
for(int i=0; i<paramNumber; i++) {
paramEvaluators[i] = ExprNodeEvaluatorFactory.get(expr.getChildExprs().get(i));
paramInspectableObjects[i] = new InspectableObject();
+ paramIsPrimitiveWritable[i] = PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(udfMethod.getParameterTypes()[i]);
}
paramValues = new Object[expr.getChildren().size()];
- outputObjectInspector = ObjectInspectorFactory.getStandardPrimitiveObjectInspector(
- udfMethod.getReturnType());
+ // The return type of a function can be of either Java Primitive Type/Class or Writable Class.
+ if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(udfMethod.getReturnType())) {
+ outputObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+ PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(udfMethod.getReturnType()).primitiveCategory);
+ } else {
+ outputObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+ PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(udfMethod.getReturnType()).primitiveCategory);
+ }
}
public void evaluate(Object row, ObjectInspector rowInspector,
@@ -76,16 +89,30 @@
// TODO: Both getList and getMap are not very efficient.
// We should convert them to UDFTemplate - UDFs that accepts Object with
// ObjectInspectors when needed.
- if (c.equals(Category.LIST)) {
- // Need to pass a Java List for List type
- paramValues[i] = ((ListObjectInspector)paramInspectableObjects[i].oi)
- .getList(paramInspectableObjects[i].o);
- } else if (c.equals(Category.MAP)) {
- // Need to pass a Java Map for Map type
- paramValues[i] = ((MapObjectInspector)paramInspectableObjects[i].oi)
- .getMap(paramInspectableObjects[i].o);
- } else {
- paramValues[i] = paramInspectableObjects[i].o;
+ switch(c) {
+ case LIST: {
+ // Need to pass a Java List for List type
+ paramValues[i] = ((ListObjectInspector)paramInspectableObjects[i].oi)
+ .getList(paramInspectableObjects[i].o);
+ break;
+ }
+ case MAP: {
+ // Need to pass a Java Map for Map type
+ paramValues[i] = ((MapObjectInspector)paramInspectableObjects[i].oi)
+ .getMap(paramInspectableObjects[i].o);
+ break;
+ }
+ case PRIMITIVE: {
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector)paramInspectableObjects[i].oi;
+ paramValues[i] = (paramIsPrimitiveWritable[i]
+ ? poi.getPrimitiveWritableObject(paramInspectableObjects[i].o)
+ : poi.getPrimitiveJavaObject(paramInspectableObjects[i].o));
+ break;
+ }
+ default: {
+ // STRUCT
+ paramValues[i] = paramInspectableObjects[i].o;
+ }
}
}
result.o = FunctionRegistry.invoke(udfMethod, udf, paramValues);
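
The switch above chooses, per parameter, whether the UDF receives the primitive's Writable wrapper or its boxed Java form, based on the declared parameter type of the resolved evaluate method. A hypothetical sketch of that dual-representation handoff, using IntWritable:

    import org.apache.hadoop.io.IntWritable;

    public class ParamDispatchSketch {
      static Object toParam(IntWritable stored, boolean wantsWritable) {
        if (stored == null) {
          return null;
        }
        // Writable-typed parameters get the wrapper itself (no allocation);
        // Java-typed parameters get a boxed copy of the value.
        return wantsWritable ? stored : Integer.valueOf(stored.get());
      }

      public static void main(String[] args) {
        IntWritable w = new IntWritable(42);
        System.out.println(toParam(w, true));   // 42 (the IntWritable itself)
        System.out.println(toParam(w, false));  // 42 (a java.lang.Integer)
      }
    }
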
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java Tue Apr 14 22:54:39 2009
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
public class ExprNodeIndexEvaluator extends ExprNodeEvaluator {
@@ -48,16 +49,25 @@
indexEvaluator.evaluate(row, rowInspector, indexInspectableObject);
if (mainInspectableObject.oi.getCategory() == Category.LIST) {
- int index = ((Number)indexInspectableObject.o).intValue();
+ PrimitiveObjectInspector poi = ((PrimitiveObjectInspector)indexInspectableObject.oi);
+ Object indexObject = poi.getPrimitiveJavaObject(indexInspectableObject.o);
+ int index = ((Number)indexObject).intValue();
ListObjectInspector loi = (ListObjectInspector)mainInspectableObject.oi;
result.oi = loi.getListElementObjectInspector();
result.o = loi.getListElement(mainInspectableObject.o, index);
}
else if (mainInspectableObject.oi.getCategory() == Category.MAP) {
+ PrimitiveObjectInspector poi = ((PrimitiveObjectInspector)indexInspectableObject.oi);
MapObjectInspector moi = (MapObjectInspector)mainInspectableObject.oi;
result.oi = moi.getMapValueObjectInspector();
- result.o = moi.getMapValueElement(mainInspectableObject.o, indexInspectableObject.o);
+ if (((PrimitiveObjectInspector)moi.getMapKeyObjectInspector()).isWritable()) {
+ Object indexObject = poi.getPrimitiveWritableObject(indexInspectableObject.o);
+ result.o = moi.getMapValueElement(mainInspectableObject.o, indexObject);
+ } else {
+ Object indexObject = poi.getPrimitiveJavaObject(indexInspectableObject.o);
+ result.o = moi.getMapValueElement(mainInspectableObject.o, indexObject);
+ }
}
else {
// Should never happen because we checked this in SemanticAnalyzer.getXpathOrFuncExprNodeDesc
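
The isWritable() branch above matters because a map lookup only succeeds when the probe key has the same representation as the stored keys; a Text-keyed map will never match a java.lang.String probe. A runnable illustration:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.io.Text;

    public class MapKeySketch {
      public static void main(String[] args) {
        Map<Text, Integer> m = new HashMap<Text, Integer>();
        m.put(new Text("k"), 1);
        System.out.println(m.get("k"));            // null: String never equals Text
        System.out.println(m.get(new Text("k")));  // 1: matching representation
      }
    }
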
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java Tue Apr 14 22:54:39 2009
@@ -56,6 +56,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
/**
* FetchTask implementation
@@ -159,7 +160,7 @@
for(String key: partKeys) {
partNames.add(key);
partValues.add(partSpec.get(key));
- partObjectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
+ partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
}
StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
rowObjectInspector = (StructObjectInspector)serde.getObjectInspector();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java Tue Apr 14 22:54:39 2009
@@ -25,6 +25,8 @@
import org.apache.hadoop.hive.ql.plan.filterDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.Reporter;
@@ -61,7 +63,8 @@
public void process(Object row, ObjectInspector rowInspector) throws HiveException {
try {
conditionEvaluator.evaluate(row, rowInspector, conditionInspectableObject);
- Boolean ret = (Boolean)(conditionInspectableObject.o);
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector)conditionInspectableObject.oi;
+ Boolean ret = (Boolean)poi.getPrimitiveJavaObject(conditionInspectableObject.o);
if (Boolean.TRUE.equals(ret)) {
forward(row, rowInspector);
passed_count.set(passed_count.get()+1);
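
With Writable-returning UDFs, the filter condition may now evaluate to a BooleanWritable rather than a java.lang.Boolean, so the operator asks the PrimitiveObjectInspector for the Java view instead of casting directly. A sketch of the distinction (the instanceof check stands in for getPrimitiveJavaObject):

    import org.apache.hadoop.io.BooleanWritable;

    public class BooleanViewSketch {
      static Boolean javaView(Object o) {
        if (o == null) {
          return null;
        }
        if (o instanceof BooleanWritable) {
          return ((BooleanWritable) o).get();  // unwrap the Writable form
        }
        return (Boolean) o;                    // already the Java form
      }

      public static void main(String[] args) {
        System.out.println(javaView(new BooleanWritable(true)));  // true
        System.out.println(javaView(Boolean.FALSE));              // false
      }
    }
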
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Tue Apr 14 22:54:39 2009
@@ -33,7 +33,12 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.groupByDesc;
import org.apache.hadoop.hive.ql.udf.*;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
public class FunctionRegistry {
@@ -130,24 +135,22 @@
// Aliases for Java Class Names
// These are used in getImplicitConvertUDFMethod
- registerUDF(Boolean.class.getName(), UDFToBoolean.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.BOOLEAN_TYPE_NAME, UDFToBoolean.class, OperatorType.PREFIX, false,
UDFToBoolean.class.getSimpleName());
- registerUDF(Byte.class.getName(), UDFToByte.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.TINYINT_TYPE_NAME, UDFToByte.class, OperatorType.PREFIX, false,
UDFToByte.class.getSimpleName());
- registerUDF(Short.class.getName(), UDFToShort.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.SMALLINT_TYPE_NAME, UDFToShort.class, OperatorType.PREFIX, false,
UDFToShort.class.getSimpleName());
- registerUDF(Integer.class.getName(), UDFToInteger.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.INT_TYPE_NAME, UDFToInteger.class, OperatorType.PREFIX, false,
UDFToInteger.class.getSimpleName());
- registerUDF(Long.class.getName(), UDFToLong.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.BIGINT_TYPE_NAME, UDFToLong.class, OperatorType.PREFIX, false,
UDFToLong.class.getSimpleName());
- registerUDF(Float.class.getName(), UDFToFloat.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.FLOAT_TYPE_NAME, UDFToFloat.class, OperatorType.PREFIX, false,
UDFToFloat.class.getSimpleName());
- registerUDF(Double.class.getName(), UDFToDouble.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.DOUBLE_TYPE_NAME, UDFToDouble.class, OperatorType.PREFIX, false,
UDFToDouble.class.getSimpleName());
- registerUDF(String.class.getName(), UDFToString.class, OperatorType.PREFIX, false,
+ registerUDF(Constants.STRING_TYPE_NAME, UDFToString.class, OperatorType.PREFIX, false,
UDFToString.class.getSimpleName());
- registerUDF(java.sql.Date.class.getName(), UDFToDate.class, OperatorType.PREFIX, false,
- UDFToDate.class.getSimpleName());
// Aggregate functions
registerUDAF("sum", UDAFSum.class);
@@ -215,53 +218,53 @@
return result;
}
- static Map<Class<?>, Integer> numericTypes;
+ static Map<TypeInfo, Integer> numericTypes = new HashMap<TypeInfo, Integer>();
+ static List<TypeInfo> numericTypeList = new ArrayList<TypeInfo>();
+ static void registerNumericType(String typeName, int level) {
+ TypeInfo t = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
+ numericTypeList.add(t);
+ numericTypes.put(t, level);
+ }
static {
- numericTypes = new HashMap<Class<?>, Integer>();
- numericTypes.put(Byte.class, 1);
- numericTypes.put(Short.class, 2);
- numericTypes.put(Integer.class, 3);
- numericTypes.put(Long.class, 4);
- numericTypes.put(Float.class, 5);
- numericTypes.put(Double.class, 6);
- numericTypes.put(String.class, 7);
+ registerNumericType(Constants.TINYINT_TYPE_NAME, 1);
+ registerNumericType(Constants.SMALLINT_TYPE_NAME, 2);
+ registerNumericType(Constants.INT_TYPE_NAME, 3);
+ registerNumericType(Constants.BIGINT_TYPE_NAME, 4);
+ registerNumericType(Constants.FLOAT_TYPE_NAME, 5);
+ registerNumericType(Constants.DOUBLE_TYPE_NAME, 6);
+ registerNumericType(Constants.STRING_TYPE_NAME, 7);
}
/**
- * Find a common class that objects of both Class a and Class b can convert to.
+ * Find a common class that objects of both TypeInfo a and TypeInfo b can convert to.
* @return null if no common class could be found.
*/
- public static Class<?> getCommonClass(Class<?> a, Class<?> b) {
- // Equal
+ public static TypeInfo getCommonClass(TypeInfo a, TypeInfo b) {
+ // If same return one of them
if (a.equals(b)) return a;
- // Java class inheritance hierarchy
- if (a.isAssignableFrom(b)) return a;
- if (b.isAssignableFrom(a)) return b;
- // Prefer String to Number conversion before implicit conversions
- if (Number.class.isAssignableFrom(a) && b.equals(String.class)) return Double.class;
- if (Number.class.isAssignableFrom(b) && a.equals(String.class)) return Double.class;
- // implicit conversions
- if (FunctionRegistry.implicitConvertable(a, b)) return b;
- if (FunctionRegistry.implicitConvertable(b, a)) return a;
+
+ for (TypeInfo t: numericTypeList) {
+ if (FunctionRegistry.implicitConvertable(a, t) &&
+ FunctionRegistry.implicitConvertable(b, t)) {
+ return t;
+ }
+ }
return null;
}
/** Returns whether an object of type "from" can be implicitly converted to type "to".
*/
- public static boolean implicitConvertable(Class<?> from, Class<?> to) {
- assert(!from.equals(to));
- // Allow implicit String to Double conversion
- if (from.equals(String.class) && to.equals(Double.class)) {
+ public static boolean implicitConvertable(TypeInfo from, TypeInfo to) {
+ if (from.equals(to)) {
return true;
}
- if (from.equals(String.class) && to.equals(java.sql.Date.class)) {
- return true;
- }
- if (from.equals(java.sql.Date.class) && to.equals(String.class)) {
+ // Allow implicit String to Double conversion
+ if (from.equals(TypeInfoFactory.stringTypeInfo)
+ && to.equals(TypeInfoFactory.doubleTypeInfo)) {
return true;
}
// Void can be converted to any type
- if (from.equals(Void.class)) {
+ if (from.equals(TypeInfoFactory.voidTypeInfo)) {
return true;
}
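
Taken together, the two rewritten methods above move type reconciliation off Java classes and onto the registered numeric ladder: getCommonClass returns the first registered type that both arguments implicitly convert to. The hunk ends before implicitConvertable's numeric-level comparison, so the following is a minimal standalone sketch of the combined behavior, assuming the omitted tail permits widening from a lower registered level to a higher one; plain type-name strings stand in for Hive's TypeInfo objects, and all names here are illustrative:

import java.util.Arrays;
import java.util.List;

public class CommonTypeSketch {
  // Widening order mirroring the registerNumericType() calls above.
  static final List<String> LADDER = Arrays.asList(
      "tinyint", "smallint", "int", "bigint", "float", "double", "string");

  static boolean implicitlyConvertible(String from, String to) {
    if (from.equals(to)) return true;            // identical types
    if (from.equals("void")) return true;        // null converts to anything
    if (from.equals("string") && to.equals("double")) return true; // special case above
    int f = LADDER.indexOf(from), t = LADDER.indexOf(to);
    return f >= 0 && t >= 0 && f < t;            // assumed: lower level widens to higher
  }

  static String commonType(String a, String b) {
    if (a.equals(b)) return a;
    for (String t : LADDER) {                    // first common target wins
      if (implicitlyConvertible(a, t) && implicitlyConvertible(b, t)) return t;
    }
    return null;                                 // no common type found
  }

  public static void main(String[] args) {
    System.out.println(commonType("int", "float"));   // float
    System.out.println(commonType("string", "int"));  // double, not string
  }
}

Note how commonType("string", "int") resolves to double rather than string because double precedes string in the ladder, preserving the preference spelled out in the comment the diff removes.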
@@ -276,20 +279,20 @@
/**
* Get the UDF method for the name and argumentClasses.
* @param name the name of the UDF
- * @param argumentClasses
+ * @param argumentTypeInfos
* @return The UDF method
*/
- public static Method getUDFMethod(String name, List<Class<?>> argumentClasses) {
+ public static Method getUDFMethod(String name, List<TypeInfo> argumentTypeInfos) {
Class<? extends UDF> udf = getUDFClass(name);
if (udf == null) return null;
Method udfMethod = null;
try {
- udfMethod = udf.newInstance().getResolver().getEvalMethod(argumentClasses);
+ udfMethod = udf.newInstance().getResolver().getEvalMethod(argumentTypeInfos);
}
catch (AmbiguousMethodException e) {
}
catch (Exception e) {
- throw new RuntimeException("getUDFMethod exception: " + e.getMessage());
+ throw new RuntimeException("Cannot get UDF for " + name + " " + argumentTypeInfos, e);
}
return udfMethod;
}
@@ -297,21 +300,21 @@
/**
* Get the UDAF evaluator for the name and argumentClasses.
* @param name the name of the UDAF
- * @param argumentClasses
+ * @param argumentTypeInfos
* @return The UDAF evaluator
*/
- public static Class<? extends UDAFEvaluator> getUDAFEvaluator(String name, List<Class<?>> argumentClasses) {
+ public static Class<? extends UDAFEvaluator> getUDAFEvaluator(String name, List<TypeInfo> argumentTypeInfos) {
Class<? extends UDAF> udf = getUDAF(name);
if (udf == null) return null;
Class<? extends UDAFEvaluator> evalClass = null;
try {
- evalClass = udf.newInstance().getResolver().getEvaluatorClass(argumentClasses);
+ evalClass = udf.newInstance().getResolver().getEvaluatorClass(argumentTypeInfos);
}
catch (AmbiguousMethodException e) {
}
catch (Exception e) {
- throw new RuntimeException("getUADFEvaluator exception: " + e.getMessage());
+ throw new RuntimeException("Cannot get UDAF for " + name + " " + argumentTypeInfos, e);
}
return evalClass;
}
@@ -320,7 +323,7 @@
* This method is shared between UDFRegistry and UDAFRegistry.
* methodName will be "evaluate" for UDFRegistry, and "aggregate"/"evaluate"/"evaluatePartial" for UDAFRegistry.
*/
- public static <T> Method getMethodInternal(Class<? extends T> udfClass, String methodName, boolean exact, List<Class<?>> argumentClasses) {
+ public static <T> Method getMethodInternal(Class<? extends T> udfClass, String methodName, boolean exact, List<TypeInfo> argumentClasses) {
ArrayList<Method> mlist = new ArrayList<Method>();
@@ -333,7 +336,7 @@
return getMethodInternal(mlist, exact, argumentClasses);
}
- public static Method getUDFMethod(String name, Class<?> ... argumentClasses) {
+ public static Method getUDFMethod(String name, TypeInfo ... argumentClasses) {
return getUDFMethod(name, Arrays.asList(argumentClasses));
}
@@ -361,9 +364,9 @@
}
/**
- * Returns the "aggregate" method of the UDAF.
+ * Returns the "iterate" method of the UDAF.
*/
- public static Method getUDAFMethod(String name, List<Class<?>> argumentClasses) {
+ public static Method getUDAFMethod(String name, List<TypeInfo> argumentClasses) {
Class<? extends UDAF> udaf = getUDAF(name);
if (udaf == null)
return null;
@@ -386,13 +389,13 @@
return FunctionRegistry.getMethodInternal(udaf,
(mode == groupByDesc.Mode.COMPLETE || mode == groupByDesc.Mode.FINAL)
? "terminate" : "terminatePartial", true,
- new ArrayList<Class<?>>() );
+ new ArrayList<TypeInfo>() );
}
/**
* Returns the "iterate" method of the UDAF.
*/
- public static Method getUDAFMethod(String name, Class<?>... argumentClasses) {
+ public static Method getUDAFMethod(String name, TypeInfo... argumentClasses) {
return getUDAFMethod(name, Arrays.asList(argumentClasses));
}
@@ -436,26 +439,45 @@
*
* @param mlist The list of methods to inspect.
* @param exact Boolean to indicate whether this is an exact match or not.
- * @param argumentClasses The classes for the argument.
+ * @param argumentsPassed The types of the arguments passed.
* @return The matching method.
*/
public static Method getMethodInternal(ArrayList<Method> mlist, boolean exact,
- List<Class<?>> argumentClasses) {
+ List<TypeInfo> argumentsPassed) {
int leastImplicitConversions = Integer.MAX_VALUE;
Method udfMethod = null;
for(Method m: mlist) {
- Class<?>[] argumentTypeInfos = m.getParameterTypes();
-
- boolean match = (argumentTypeInfos.length == argumentClasses.size());
+ List<TypeInfo> argumentsAccepted = TypeInfoUtils.getParameterTypeInfos(m);
+
+ boolean match = (argumentsAccepted.size() == argumentsPassed.size());
int implicitConversions = 0;
- for(int i=0; i<argumentClasses.size() && match; i++) {
- if (argumentClasses.get(i) == Void.class) continue;
- Class<?> accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]);
- if (accepted.isAssignableFrom(argumentClasses.get(i))) {
+ for(int i=0; i<argumentsPassed.size() && match; i++) {
+ TypeInfo argumentPassed = argumentsPassed.get(i);
+ TypeInfo argumentAccepted = argumentsAccepted.get(i);
+ if (argumentPassed.equals(TypeInfoFactory.voidTypeInfo)) {
+ // passing null matches everything
+ continue;
+ }
+ if (argumentAccepted.equals(TypeInfoFactory.unknownTypeInfo)) {
+ // accepting Object means accepting everything
+ continue;
+ }
+ if (argumentPassed.getCategory().equals(Category.LIST)
+ && argumentAccepted.equals(TypeInfoFactory.unknownListTypeInfo)) {
+ // accepting List means accepting List of everything
+ continue;
+ }
+ if (argumentPassed.getCategory().equals(Category.MAP)
+ && argumentAccepted.equals(TypeInfoFactory.unknownMapTypeInfo)) {
+ // accepting Map means accepting Map of everything
+ continue;
+ }
+ TypeInfo accepted = argumentsAccepted.get(i);
+ if (accepted.equals(argumentsPassed.get(i))) {
// do nothing if match
- } else if (!exact && implicitConvertable(argumentClasses.get(i), accepted)) {
+ } else if (!exact && implicitConvertable(argumentsPassed.get(i), accepted)) {
implicitConversions ++;
} else {
match = false;
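
The loop above now compares TypeInfo lists instead of reflected Class arrays, counting one implicit conversion per non-exact parameter; the portion of the method below this hunk keeps the candidate with the fewest conversions (leastImplicitConversions above). A reduced sketch of that selection, with signatures flattened to type-name lists, the unknown/list/map wildcard cases dropped, and the real code's ambiguity handling omitted; Candidate and bestMatch are illustrative names reusing implicitlyConvertible() from the earlier sketch:

import java.util.List;

public class OverloadSketch {
  static class Candidate {
    final List<String> params;
    Candidate(List<String> params) { this.params = params; }
  }

  // Picks the candidate needing the fewest implicit conversions.
  static Candidate bestMatch(List<Candidate> candidates, List<String> passed) {
    int least = Integer.MAX_VALUE;
    Candidate best = null;
    for (Candidate c : candidates) {
      if (c.params.size() != passed.size()) continue;  // arity must match
      int conversions = 0;
      boolean match = true;
      for (int i = 0; i < passed.size() && match; i++) {
        String p = passed.get(i);
        String a = c.params.get(i);
        if (p.equals("void")) continue;                // null argument matches anything
        if (p.equals(a)) continue;                     // exact match costs nothing
        if (CommonTypeSketch.implicitlyConvertible(p, a)) conversions++;
        else match = false;
      }
      if (match && conversions < least) { least = conversions; best = c; }
    }
    return best;                                       // null when nothing matches
  }
}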
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Tue Apr 14 22:54:39 2009
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
@@ -35,11 +36,18 @@
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -55,7 +63,9 @@
private static final int NUMROWSESTIMATESIZE = 1000;
transient protected ExprNodeEvaluator[] keyFields;
+ transient protected InspectableObject[] keyInspectableObject;
transient protected ExprNodeEvaluator[][] aggregationParameterFields;
+ transient protected InspectableObject[][] aggregationParameterInspectableObject;
// In the future, we may allow both count(DISTINCT a) and sum(DISTINCT a) in the same SQL clause,
// so aggregationIsDistinct is a boolean array instead of a single number.
transient protected boolean[] aggregationIsDistinct;
@@ -70,6 +80,7 @@
// Used by sort-based GroupBy: Mode = COMPLETE, PARTIAL1, PARTIAL2
transient protected ArrayList<Object> currentKeys;
+ transient protected ArrayList<Object> newKeys;
transient protected UDAFEvaluator[] aggregations;
transient protected Object[][] aggregationsParametersLastInvoke;
@@ -84,6 +95,7 @@
transient int groupbyMapAggrInterval;
transient long numRowsCompareHashAggr;
+
/**
* This is used to store the position and field names for variable length fields.
**/
@@ -124,17 +136,23 @@
// init keyFields
keyFields = new ExprNodeEvaluator[conf.getKeys().size()];
+ keyInspectableObject = new InspectableObject[conf.getKeys().size()];
for (int i = 0; i < keyFields.length; i++) {
keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i));
+ keyInspectableObject[i] = new InspectableObject();
}
-
+ newKeys = new ArrayList<Object>(keyFields.length);
+
// init aggregationParameterFields
aggregationParameterFields = new ExprNodeEvaluator[conf.getAggregators().size()][];
+ aggregationParameterInspectableObject = new InspectableObject[conf.getAggregators().size()][];
for (int i = 0; i < aggregationParameterFields.length; i++) {
ArrayList<exprNodeDesc> parameters = conf.getAggregators().get(i).getParameters();
aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()];
+ aggregationParameterInspectableObject[i] = new InspectableObject[parameters.size()];
for (int j = 0; j < parameters.size(); j++) {
aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory.get(parameters.get(j));
+ aggregationParameterInspectableObject[i][j] = new InspectableObject();
}
}
// init aggregationIsDistinct
@@ -210,8 +228,9 @@
objectInspectors.add(null);
}
for(int i=0; i<aggregationClasses.length; i++) {
- objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(
- aggregationsEvaluateMethods[i].getReturnType()));
+ objectInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+ PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(
+ aggregationsEvaluateMethods[i].getReturnType()).primitiveCategory));
}
fieldNames = new ArrayList<String>(objectInspectors.size());
@@ -253,23 +272,26 @@
* @param c the type of the key
* @return the size of this datatype
**/
- private int getSize(int pos, Class<?> c) {
- if (c.isPrimitive() ||
- c.isInstance(new Boolean(true)) ||
- c.isInstance(new Byte((byte)0)) ||
- c.isInstance(new Short((short)0)) ||
- c.isInstance(new Integer(0)) ||
- c.isInstance(new Long(0)) ||
- c.isInstance(new Float(0)) ||
- c.isInstance(new Double(0)))
- return javaSizePrimitiveType;
-
- if (c.isInstance(new String())) {
- keyPositionsSize.add(new Integer(pos));
- return javaObjectOverHead;
+ private int getSize(int pos, PrimitiveCategory category) {
+ switch(category) {
+ case VOID:
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ case FLOAT:
+ case DOUBLE: {
+ return javaSizePrimitiveType;
+ }
+ case STRING: {
+ keyPositionsSize.add(new Integer(pos));
+ return javaObjectOverHead;
+ }
+ default: {
+ return javaSizeUnknownType;
+ }
}
-
- return javaSizeUnknownType;
}
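
A rough worked instance of the costing above, with assumed byte counts (the real javaSizePrimitiveType and javaObjectOverHead constants are defined elsewhere in GroupByOperator and may differ):

public class KeySizeSketch {
  static final int javaSizePrimitiveType = 16;  // assumed value
  static final int javaObjectOverHead    = 64;  // assumed value

  public static void main(String[] args) {
    // Fixed cost of an (int, string, double) key; the string's variable
    // length is sampled per row via keyPositionsSize in the real code.
    int fixed = javaSizePrimitiveType       // int
              + javaObjectOverHead          // string: overhead only
              + javaSizePrimitiveType;      // double
    System.out.println("fixed bytes/key = " + fixed);  // 96
  }
}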
/**
@@ -320,7 +342,7 @@
**/
private int getSize(int pos, TypeInfo typeInfo) {
if (typeInfo instanceof PrimitiveTypeInfo)
- return getSize(pos, typeInfo.getPrimitiveClass());
+ return getSize(pos, ((PrimitiveTypeInfo)typeInfo).getPrimitiveCategory());
return javaSizeUnknownType;
}
@@ -366,8 +388,6 @@
return aggs;
}
- InspectableObject tempInspectableObject = new InspectableObject();
-
protected void updateAggregations(UDAFEvaluator[] aggs, Object row, ObjectInspector rowInspector, boolean hashAggr, boolean newEntry,
Object[][] lastInvoke) throws HiveException {
for(int ai=0; ai<aggs.length; ai++) {
@@ -375,8 +395,9 @@
// Calculate the parameters
Object[] o = new Object[aggregationParameterFields[ai].length];
for(int pi=0; pi<aggregationParameterFields[ai].length; pi++) {
- aggregationParameterFields[ai][pi].evaluate(row, rowInspector, tempInspectableObject);
- o[pi] = tempInspectableObject.o;
+ aggregationParameterFields[ai][pi].evaluate(row, rowInspector, aggregationParameterInspectableObject[ai][pi]);
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector)aggregationParameterInspectableObject[ai][pi].oi;
+ o[pi] = poi.getPrimitiveWritableObject(aggregationParameterInspectableObject[ai][pi].o);
}
// Update the aggregations.
@@ -407,7 +428,13 @@
if (differentParameters) {
FunctionRegistry.invoke(aggregationsAggregateMethods[ai], aggs[ai], o);
- lastInvoke[ai] = o;
+ if (lastInvoke[ai] == null) {
+ lastInvoke[ai] = new Object[o.length];
+ }
+ for (int pi=0; pi<o.length; pi++) {
+ lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject(o[pi],
+ aggregationParameterInspectableObject[ai][pi].oi);
+ }
}
}
}
@@ -437,12 +464,12 @@
try {
// Compute the keys
- ArrayList<Object> newKeys = new ArrayList<Object>(keyFields.length);
+ newKeys.clear();
for (int i = 0; i < keyFields.length; i++) {
- keyFields[i].evaluate(row, rowInspector, tempInspectableObject);
- newKeys.add(tempInspectableObject.o);
+ keyFields[i].evaluate(row, rowInspector, keyInspectableObject[i]);
+ newKeys.add(keyInspectableObject[i].o);
if (firstRow) {
- objectInspectors.set(i, tempInspectableObject.oi);
+ objectInspectors.set(i, keyInspectableObject[i].oi);
}
}
@@ -463,6 +490,19 @@
}
}
+ private static ArrayList<Object> deepCopyElements(InspectableObject[] keys) {
+ ArrayList<Object> result = new ArrayList<Object>(keys.length);
+ deepCopyElements(keys, result);
+ return result;
+ }
+
+ private static void deepCopyElements(InspectableObject[] keys, ArrayList<Object> result) {
+ result.clear();
+ for (int i=0; i<keys.length; i++) {
+ result.add(ObjectInspectorUtils.copyToStandardObject(keys[i].o, keys[i].oi));
+ }
+ }
+
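
The deep copy is the point of this hunk: each keyInspectableObject is reused for every row, and after the switch to writables a stored Text reference would be mutated in place by the next row, corrupting hash-table keys. A self-contained illustration of the failure mode and the fix (names and values made up):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.io.Text;

public class DeepCopySketch {
  public static void main(String[] args) {
    Map<List<Object>, Long> counts = new HashMap<List<Object>, Long>();
    Text reused = new Text();                 // stands in for the reused evaluator output

    reused.set("apple");
    List<Object> liveKey = new ArrayList<Object>();
    liveKey.add(reused);
    counts.put(liveKey, 1L);                  // BUG: stores the live reference
    reused.set("banana");                     // the key inside the map silently became "banana"
    System.out.println(counts.containsKey(liveKey));  // typically false: entry filed under the old hash

    // The fix mirrors deepCopyElements(): copy each key to a fresh object first.
    List<Object> safeKey = new ArrayList<Object>();
    safeKey.add(new Text(reused));
    counts.put(safeKey, 1L);
  }
}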
private void processHashAggr(Object row, ObjectInspector rowInspector, ArrayList<Object> newKeys) throws HiveException {
// Prepare aggs for updating
UDAFEvaluator[] aggs = null;
@@ -472,7 +512,7 @@
aggs = hashAggregations.get(newKeys);
if (aggs == null) {
aggs = newAggregations();
- hashAggregations.put(newKeys, aggs);
+ hashAggregations.put(deepCopyElements(keyInspectableObject), aggs);
newEntry = true;
numRowsHashTbl++; // new entry in the hash table
}
@@ -480,7 +520,7 @@
// Update the aggs
updateAggregations(aggs, row, rowInspector, true, newEntry, null);
- // based on used-specified pramaters, check if the hash table needs to be flushed
+ // based on user-specified parameters, check if the hash table needs to be flushed
if (shouldBeFlushed(newKeys)) {
flush(false);
}
@@ -492,15 +532,18 @@
Object[][] lastInvoke = null;
boolean keysAreEqual = newKeys.equals(currentKeys);
- // forward the current keys if needed for sort-based aggregation
+ // Forward the current keys if needed for sort-based aggregation
if (currentKeys != null && !keysAreEqual)
forward(currentKeys, aggregations);
// Need to update the keys?
if (currentKeys == null || !keysAreEqual) {
- currentKeys = newKeys;
+ if (currentKeys == null) {
+ currentKeys = new ArrayList<Object>(keyFields.length);
+ }
+ deepCopyElements(keyInspectableObject, currentKeys);
- // init aggregations
+ // Init aggregations
for(UDAFEvaluator aggregation: aggregations)
aggregation.init();
@@ -528,8 +571,13 @@
for (Integer pos : keyPositionsSize) {
Object key = newKeys.get(pos.intValue());
// Ignore nulls
- if (key != null)
- totalVariableSize += ((String)key).length();
+ if (key != null) {
+ if (key instanceof String) {
+ totalVariableSize += ((String)key).length();
+ } else {
+ totalVariableSize += ((Text)key).getLength();
+ }
+ }
}
UDAFEvaluator[] aggs = null;
@@ -555,9 +603,6 @@
// Update the number of entries that can fit in the hash table
numEntriesHashTable = (int)(maxHashTblMemory / (fixedRowSize + ((int)totalVariableSize/numEntriesVarSize)));
LOG.trace("Hash Aggr: #hash table = " + numEntries + " #max in hash table = " + numEntriesHashTable);
-
- if ((numEntries % (100 * NUMROWSESTIMATESIZE)) == 0)
- LOG.warn("Hash Aggr: #hash table = " + numEntries + " #max in hash table = " + numEntriesHashTable);
}
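
A worked instance of the capacity formula above with made-up numbers, since the surrounding fields sit outside this hunk:

public class HashAggrEstimateSketch {
  public static void main(String[] args) {
    long maxHashTblMemory = 64L << 20;       // assumed 64 MB budget
    int  fixedRowSize = 200;                 // assumed fixed bytes per entry
    long totalVariableSize = 50000;          // sampled string bytes so far
    int  numEntriesVarSize = 1000;           // entries sampled

    int numEntriesHashTable =
        (int)(maxHashTblMemory / (fixedRowSize + (int)(totalVariableSize / numEntriesVarSize)));
    System.out.println(numEntriesHashTable); // 64 MB / (200 + 50) bytes = 268435 entries
  }
}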
// flush if necessary
@@ -599,6 +644,8 @@
}
}
+ transient Object[] forwardCache;
+
/**
* Forward a record of keys and aggregation results.
*
@@ -608,19 +655,21 @@
*/
protected void forward(ArrayList<Object> keys, UDAFEvaluator[] aggs) throws HiveException {
int totalFields = keys.size() + aggs.length;
- List<Object> a = new ArrayList<Object>(totalFields);
+ if (forwardCache == null) {
+ forwardCache = new Object[totalFields];
+ }
for(int i=0; i<keys.size(); i++) {
- a.add(keys.get(i));
+ forwardCache[i] = keys.get(i);
}
for(int i=0; i<aggs.length; i++) {
try {
- a.add(aggregationsEvaluateMethods[i].invoke(aggs[i]));
+ forwardCache[keys.size() + i] = aggregationsEvaluateMethods[i].invoke(aggs[i]);
} catch (Exception e) {
throw new HiveException("Unable to execute UDAF function " + aggregationsEvaluateMethods[i] + " "
+ " on object " + "(" + aggs[i] + ") " + ": " + e.getMessage());
}
}
- forward(a, outputObjectInspector);
+ forward(forwardCache, outputObjectInspector);
}
/**
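
The forward() change above is purely an allocation fix: one lazily sized Object[] is overwritten per row instead of building a fresh ArrayList for every output row. The same pattern in isolation; emit() is a hypothetical stand-in for the operator's downstream forward call:

public class ForwardCacheSketch {
  private Object[] forwardCache;             // sized once, reused for every row

  void forwardRow(java.util.List<Object> keys, Object[] aggResults) {
    int total = keys.size() + aggResults.length;
    if (forwardCache == null) {
      forwardCache = new Object[total];
    }
    for (int i = 0; i < keys.size(); i++) {
      forwardCache[i] = keys.get(i);         // keys first
    }
    for (int i = 0; i < aggResults.length; i++) {
      forwardCache[keys.size() + i] = aggResults[i];  // then aggregation results
    }
    emit(forwardCache);                      // hypothetical downstream call
  }

  void emit(Object[] row) { /* stand-in for Operator.forward(row, oi) */ }
}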
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java Tue Apr 14 22:54:39 2009
@@ -40,6 +40,10 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Reporter;
/**
@@ -112,9 +116,9 @@
static {
aliasField = ExprNodeEvaluatorFactory.get(new exprNodeColumnDesc(
- String.class, Utilities.ReduceField.ALIAS.toString()));
+ TypeInfoFactory.stringTypeInfo, Utilities.ReduceField.ALIAS.toString()));
keyField = ExprNodeEvaluatorFactory.get(new exprNodeColumnDesc(
- String.class, Utilities.ReduceField.KEY.toString()));
+ TypeInfoFactory.stringTypeInfo, Utilities.ReduceField.KEY.toString()));
}
HashMap<Byte, Vector<ArrayList<Object>>> storage;
@@ -156,8 +160,8 @@
ArrayList<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>(
totalSz);
for (int i = 0; i < totalSz; i++) {
- structFieldObjectInspectors.add(ObjectInspectorFactory
- .getStandardPrimitiveObjectInspector(String.class));
+ structFieldObjectInspectors.add(PrimitiveObjectInspectorFactory
+ .writableStringObjectInspector);
}
joinOutputObjectInspector = ObjectInspectorFactory
.getStandardStructObjectInspector(ObjectInspectorUtils
@@ -184,6 +188,8 @@
iterators = new Stack<Iterator<ArrayList<Object>>>();
joinEmitInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEJOINEMITINTERVAL);
+
+ forwardCache = new Object[totalSz];
}
public void startGroup() throws HiveException {
@@ -200,7 +206,7 @@
try {
// get alias
aliasField.evaluate(row, rowInspector, tempAliasInspectableObject);
- Byte alias = (Byte) (tempAliasInspectableObject.o);
+ Byte alias = (Byte) ((PrimitiveObjectInspector)tempAliasInspectableObject.oi).getPrimitiveJavaObject(tempAliasInspectableObject.o);
// get the expressions for that alias
JoinExprMap exmap = joinExprs.get(alias);
@@ -210,7 +216,7 @@
ArrayList<Object> nr = new ArrayList<Object>(valueFields.length);
for (ExprNodeEvaluator vField : valueFields) {
vField.evaluate(row, rowInspector, tempAliasInspectableObject);
- nr.add(tempAliasInspectableObject.o);
+ nr.add(ObjectInspectorUtils.copyToStandardObject(tempAliasInspectableObject.o, tempAliasInspectableObject.oi));
}
// Are we consuming too much memory
@@ -245,24 +251,26 @@
}
}
+ transient Object[] forwardCache;
+
private void createForwardJoinObject(IntermediateObject intObj,
boolean[] nullsArr) throws HiveException {
- ArrayList<Object> nr = new ArrayList<Object>(totalSz);
+ int p = 0;
for (int i = 0; i < numValues; i++) {
Byte alias = order[i];
int sz = joinExprs.get(alias).getValueFields().length;
if (nullsArr[i]) {
for (int j = 0; j < sz; j++) {
- nr.add(null);
+ forwardCache[p++] = null;
}
} else {
ArrayList<Object> obj = intObj.getObjs()[i];
for (int j = 0; j < sz; j++) {
- nr.add(obj.get(j));
+ forwardCache[p++] = obj.get(j);
}
}
}
- forward(nr, joinOutputObjectInspector);
+ forward(forwardCache, joinOutputObjectInspector);
}
private void copyOldArray(boolean[] src, boolean[] dest) {
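
With this hunk, every join output column is described by the shared writable-string inspector (backed by Text) rather than a java.lang.String inspector. Building such a schema in isolation, with made-up field names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class JoinSchemaSketch {
  static StructObjectInspector textOnlyStruct() {
    List<String> names = Arrays.asList("c0", "c1", "c2");  // made-up columns
    List<ObjectInspector> ois = new ArrayList<ObjectInspector>(names.size());
    for (int i = 0; i < names.size(); i++) {
      // Every join output column is typed as a writable string (Text).
      ois.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
  }
}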
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=764994&r1=764993&r2=764994&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Tue Apr 14 22:54:39 2009
@@ -22,6 +22,7 @@
import java.io.*;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.conf.Configuration;
@@ -38,6 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
/**
@@ -58,7 +60,7 @@
transient private StructObjectInspector rowObjectInspector;
transient private List<String> partNames;
- transient private List<String> partValues;
+ transient private Object[] partValues;
transient private List<ObjectInspector> partObjectInspectors;
@@ -110,15 +112,16 @@
// the serdes for the partition columns
String pcols = p.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
if (pcols != null && pcols.length() > 0) {
- partNames = new ArrayList<String>();
- partValues = new ArrayList<String>();
- partObjectInspectors = new ArrayList<ObjectInspector>();
String[] partKeys = pcols.trim().split("/");
- for(String key: partKeys) {
+ partNames = new ArrayList<String>(partKeys.length);
+ partValues = new Object[partKeys.length];
+ partObjectInspectors = new ArrayList<ObjectInspector>(partKeys.length);
+ for(int i = 0; i < partKeys.length; i++ ) {
+ String key = partKeys[i];
partNames.add(key);
- partValues.add(partSpec.get(key));
+ partValues[i] = new Text(partSpec.get(key));
partObjectInspectors.add(
- ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
+ PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
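
The MapOperator change follows the same pattern: each partition value is materialized once as a Text writable alongside the shared writable-string inspector, instead of keeping per-row Strings. The wiring in miniature, under an assumed partition spec:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class PartitionValuesSketch {
  public static void main(String[] args) {
    Map<String, String> partSpec = new HashMap<String, String>();
    partSpec.put("ds", "2009-04-14");                  // made-up values
    partSpec.put("hr", "00");

    String[] partKeys = "ds/hr".split("/");            // as if pcols == "ds/hr"
    Object[] partValues = new Object[partKeys.length];
    List<ObjectInspector> partOIs = new ArrayList<ObjectInspector>(partKeys.length);
    for (int i = 0; i < partKeys.length; i++) {
      partValues[i] = new Text(partSpec.get(partKeys[i]));  // stored once as Text
      partOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
  }
}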