Posted to commits@hive.apache.org by gu...@apache.org on 2013/10/23 22:50:43 UTC

svn commit: r1535174 [1/8] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ common/src/java/conf/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/ hc...

Author: gunther
Date: Wed Oct 23 20:50:38 2013
New Revision: 1535174

URL: http://svn.apache.org/r1535174
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Added:
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedExpressions.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedExpressions.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterColAndScalar.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterColAndScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterColOrScalar.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterColOrScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterScalarAndColumn.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterScalarAndColumn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterScalarOrColumn.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterScalarOrColumn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRandNoSeed.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRandNoSeed.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
      - copied unchanged from r1535144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_exists_implicit_gby.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_exists_implicit_gby.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_in_groupby.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_in_groupby.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_in_select.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_in_select.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_multiple_cols_in_select.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_multiple_cols_in_select.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_notexists_implicit_gby.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_notexists_implicit_gby.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_subquery_chain.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_subquery_chain.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
    hive/branches/tez/ql/src/test/queries/clientnegative/subquery_with_or_cond.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientnegative/subquery_with_or_cond.q
    hive/branches/tez/ql/src/test/queries/clientpositive/subquery_exists.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/subquery_exists.q
    hive/branches/tez/ql/src/test/queries/clientpositive/subquery_in.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/subquery_in.q
    hive/branches/tez/ql/src/test/queries/clientpositive/subquery_multiinsert.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/subquery_multiinsert.q
    hive/branches/tez/ql/src/test/queries/clientpositive/subquery_notexists.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/subquery_notexists.q
    hive/branches/tez/ql/src/test/queries/clientpositive/subquery_notin.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/subquery_notin.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_0.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_0.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_1.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_1.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_10.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_10.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_11.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_11.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_12.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_12.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_13.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_13.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_14.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_14.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_15.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_15.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_16.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_16.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_2.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_2.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_3.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_3.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_4.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_4.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_5.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_5.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_6.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_6.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_7.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_7.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_8.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_8.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_9.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_9.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorization_not.q
      - copied unchanged from r1535144, hive/trunk/ql/src/test/queries/clientpositive/vectorization_not.q
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_exists_implicit_gby.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_exists_implicit_gby.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_in_groupby.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_in_groupby.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_in_select.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_in_select.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_multiple_cols_in_select.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_multiple_cols_in_select.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_notexists_implicit_gby.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_notexists_implicit_gby.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_subquery_chain.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_subquery_chain.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/subquery_with_or_cond.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientnegative/subquery_with_or_cond.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_exists.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/subquery_exists.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_in.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/subquery_in.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_notexists.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/subquery_notexists.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_notin.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_0.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_0.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_1.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_10.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_10.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_11.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_12.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_12.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_13.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_13.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_14.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_14.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_15.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_16.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_16.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_2.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_3.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_4.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_5.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_6.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_6.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_7.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_7.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_8.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_8.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_9.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_9.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_not.q.out
      - copied unchanged from r1535144, hive/trunk/ql/src/test/results/clientpositive/vectorization_not.q.out
Removed:
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterNotExpr.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java
    hive/branches/tez/common/src/java/conf/hive-log4j.properties
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatInputFormatReader.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
    hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
    hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/inputoutput.xml
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/GroupByAge.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadJson.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadRC.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadText.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadWrite.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SimpleRead.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreComplex.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreDemo.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreNumbers.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SumNumbers.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/TypeDataCheck.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteJson.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteRC.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteText.java
    hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
    hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalar.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareScalar.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryFunc.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareScalar.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterScalarCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareScalar.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ScalarCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareScalar.txt
    hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareColumn.txt
    hive/branches/tez/ql/src/gen/vectorization/TestTemplates/TestColumnScalarFilterVectorExpressionEvaluation.txt
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ColAndCol.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ColOrCol.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterExprAndExpr.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterExprOrExpr.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColLikeStringScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLogWithBaseDoubleToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLogWithBaseLongToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToString.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncPowerDoubleToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncPowerLongToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRand.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IdentityExpression.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IsNotNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IsNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongToStringUnaryUDF.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NotCol.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/PosModDoubleToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/PosModLongToLong.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/RoundWithNumDigitsDoubleToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/SelectColumnIsFalse.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/SelectColumnIsNotNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/SelectColumnIsNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/SelectColumnIsTrue.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColCol.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColScalar.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatScalarCol.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringLength.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStart.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStartLen.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDF.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDFDirect.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldLong.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDegrees.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSign.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNot.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorLogicalExpressions.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
    hive/branches/tez/ql/src/test/queries/clientpositive/groupby2_map_multi_distinct.q
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/count.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby2_map_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_distinct_samekey.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_sort_11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join18_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/limit_pushdown.q.out

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1534899-1535144

Modified: hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java (original)
+++ hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java Wed Oct 23 20:50:38 2013
@@ -134,9 +134,11 @@ public class GenVectorTestCode {
       if(op1IsCol){
         testCase = testCase.replaceAll("<Operand1>","inputColumnVector.vector[i]");
         testCase = testCase.replaceAll("<Operand2>","scalarValue");
+        testCase = testCase.replaceAll("<ConstructorParams>","0, scalarValue");
       }else{
         testCase = testCase.replaceAll("<Operand1>","scalarValue");
         testCase = testCase.replaceAll("<Operand2>","inputColumnVector.vector[i]");
+        testCase = testCase.replaceAll("<ConstructorParams>","scalarValue, 0");
       }
 
       testsuites.get(template).append(testCase);

Modified: hive/branches/tez/common/src/java/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/conf/hive-log4j.properties?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/conf/hive-log4j.properties (original)
+++ hive/branches/tez/common/src/java/conf/hive-log4j.properties Wed Oct 23 20:50:38 2013
@@ -16,7 +16,7 @@
 
 # Define some default values that can be overridden by system properties
 hive.log.threshold=ALL
-hive.root.logger=WARN,DRFA
+hive.root.logger=INFO,DRFA
 hive.log.dir=/tmp/${user.name}
 hive.log.file=hive.log
 

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatInputFormatReader.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatInputFormatReader.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatInputFormatReader.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatInputFormatReader.java Wed Oct 23 20:50:38 2013
@@ -63,7 +63,7 @@ public class HCatInputFormatReader exten
     try {
       Job job = new Job(conf);
       HCatInputFormat hcif = HCatInputFormat.setInput(
-        job, re.getDbName(), re.getTableName()).setFilter(re.getFilterString());
+        job, re.getDbName(), re.getTableName(), re.getFilterString());
       ReaderContext cntxt = new ReaderContext();
       cntxt.setInputSplits(hcif.getSplits(
           ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java Wed Oct 23 20:50:38 2013
@@ -39,28 +39,33 @@ public class HCatInputFormat extends HCa
   private InputJobInfo inputJobInfo;
 
   /**
-   * @deprecated as of release 0.5, and will be removed in a future release
+   * Initializes the input with a null filter.
+   * See {@link #setInput(org.apache.hadoop.conf.Configuration, String, String, String)}
    */
-  @Deprecated
-  public static void setInput(Job job, InputJobInfo inputJobInfo) throws IOException {
-    setInput(job.getConfiguration(), inputJobInfo);
+  public static HCatInputFormat setInput(
+          Job job, String dbName, String tableName)
+    throws IOException {
+    return setInput(job.getConfiguration(), dbName, tableName, null);
   }
 
   /**
-   * @deprecated as of release 0.5, and will be removed in a future release
+   * Initializes the input with a provided filter.
+   * See {@link #setInput(org.apache.hadoop.conf.Configuration, String, String, String)}
    */
-  @Deprecated
-  public static void setInput(Configuration conf, InputJobInfo inputJobInfo) throws IOException {
-    setInput(conf, inputJobInfo.getDatabaseName(), inputJobInfo.getTableName())
-      .setFilter(inputJobInfo.getFilter())
-      .setProperties(inputJobInfo.getProperties());
+  public static HCatInputFormat setInput(
+          Job job, String dbName, String tableName, String filter)
+    throws IOException {
+    return setInput(job.getConfiguration(), dbName, tableName, filter);
   }
 
   /**
-   * See {@link #setInput(org.apache.hadoop.conf.Configuration, String, String)}
+   * Initializes the input with a null filter.
+   * See {@link #setInput(org.apache.hadoop.conf.Configuration, String, String, String)}
    */
-  public static HCatInputFormat setInput(Job job, String dbName, String tableName) throws IOException {
-    return setInput(job.getConfiguration(), dbName, tableName);
+  public static HCatInputFormat setInput(
+          Configuration conf, String dbName, String tableName)
+    throws IOException {
+    return setInput(conf, dbName, tableName, null);
   }
 
   /**
@@ -69,9 +74,11 @@ public class HCatInputFormat extends HCa
    * @param conf the job configuration
    * @param dbName database name, which if null 'default' is used
    * @param tableName table name
+   * @param filter the partition filter to use, can be null for no filter
    * @throws IOException on all errors
    */
-  public static HCatInputFormat setInput(Configuration conf, String dbName, String tableName)
+  public static HCatInputFormat setInput(
+          Configuration conf, String dbName, String tableName, String filter)
     throws IOException {
 
     Preconditions.checkNotNull(conf, "required argument 'conf' is null");
@@ -79,7 +86,7 @@ public class HCatInputFormat extends HCa
 
     HCatInputFormat hCatInputFormat = new HCatInputFormat();
     hCatInputFormat.conf = conf;
-    hCatInputFormat.inputJobInfo = InputJobInfo.create(dbName, tableName, null, null);
+    hCatInputFormat.inputJobInfo = InputJobInfo.create(dbName, tableName, filter, null);
 
     try {
       InitializeInput.setInput(conf, hCatInputFormat.inputJobInfo);
@@ -91,11 +98,11 @@ public class HCatInputFormat extends HCa
   }
 
   /**
-   * Set a filter on the input table.
-   * @param filter the filter specification, which may be null
-   * @return this
-   * @throws IOException on all errors
+   * @deprecated As of 0.13
+   * Use {@link #setInput(org.apache.hadoop.conf.Configuration, String, String, String)} instead,
+   * to specify a partition filter to directly initialize the input with.
    */
+  @Deprecated
   public HCatInputFormat setFilter(String filter) throws IOException {
     // null filters are supported to simplify client code
     if (filter != null) {
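
For callers migrating across the API change above, a minimal usage sketch follows (the helper class and method names are hypothetical, for illustration only); it contrasts the deprecated setFilter() chaining with the new four-argument setInput() that this diff introduces.

    import java.io.IOException;

    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    public class SetInputMigrationSketch {
      public static void configureInput(Job job, String dbName, String tableName, String filter)
          throws IOException {
        // Old style, now deprecated: initialize the input, then chain setFilter().
        // HCatInputFormat.setInput(job, dbName, tableName).setFilter(filter);

        // New style: pass the (possibly null) partition filter directly to setInput().
        HCatInputFormat.setInput(job, dbName, tableName, filter);

        job.setInputFormatClass(HCatInputFormat.class);
      }
    }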

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java Wed Oct 23 20:50:38 2013
@@ -341,7 +341,7 @@ public abstract class HCatMapReduceTest 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    HCatInputFormat.setInput(job, dbName, tableName).setFilter(filter);
+    HCatInputFormat.setInput(job, dbName, tableName, filter);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(Text.class);

Modified: hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java (original)
+++ hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java Wed Oct 23 20:50:38 2013
@@ -116,7 +116,7 @@ public class HCatLoader extends HCatBase
       }
     } else {
       Job clone = new Job(job.getConfiguration());
-      HCatInputFormat.setInput(job, dbName, tableName).setFilter(getPartitionFilterString());
+      HCatInputFormat.setInput(job, dbName, tableName, getPartitionFilterString());
 
       // We will store all the new /changed properties in the job in the
       // udf context, so the the HCatInputFormat.setInput method need not

Modified: hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/inputoutput.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/inputoutput.xml?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/inputoutput.xml (original)
+++ hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/inputoutput.xml Wed Oct 23 20:50:38 2013
@@ -45,9 +45,7 @@
 	    <li><code>getTableSchema</code></li>
 	</ul>
 
-	<p>To use HCatInputFormat to read data, first instantiate an <code>InputJobInfo</code>
-	with the necessary information from the table being read
-	and then call setInput with the <code>InputJobInfo</code>.</p>
+	<p>To use HCatInputFormat to read data, call setInput with the database name, tablename and an optional partition filter.</p>
 
 <p>You can use the <code>setOutputSchema</code> method to include a projection schema, to
 specify the output fields. If a schema is not specified, all the columns in the table
@@ -62,11 +60,13 @@ will be returned.</p>
    * the information in the conf object. The inputInfo object is updated with
    * information needed in the client context
    * @param job the job object
-   * @param inputJobInfo the input info for table to read
+   * @param dbName the database where the table lies
+   * @param tableName the table to read
+   * @param filter the partition filter to use
    * @throws IOException the exception in communicating with the metadata server
    */
   public static void setInput(Job job,
-      InputJobInfo inputJobInfo) throws IOException;
+      String dbName, String tableName, String filter) throws IOException;
 
   /**
    * Set the schema for the HCatRecord data returned by HCatInputFormat.
@@ -354,8 +354,8 @@ public class GroupByAge extends Configur
         String dbName = null;
 
         Job job = new Job(conf, "GroupByAge");
-        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null));
+        HCatInputFormat.setInput(job, dbName,
+                inputTableName);
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
@@ -388,8 +388,8 @@ public class GroupByAge extends Configur
 <li>The implementation of Map takes HCatRecord as an input and the implementation of Reduce produces it as an output.</li>
 <li>This example program assumes the schema of the input, but it could also retrieve the schema via
 HCatOutputFormat.getOutputSchema() and retrieve fields based on the results of that call.</li>
-<li>The input descriptor for the table to be read is created by calling InputJobInfo.create.  It requires the database name,
-table name, and partition filter.  In this example the partition filter is null, so all partitions of the table
+<li>The input descriptor for the table to be read is created by passing the database name,
+table name, and an optional partition filter to HCatInputFormat.setInput. In this example the partition filter is null, so all partitions of the table
 will be read.</li>
 <li>The output descriptor for the table to be written is created by calling OutputJobInfo.create.  It requires the
 database name, the table name, and a Map of partition keys and values that describe the partition being written.
@@ -397,7 +397,7 @@ In this example it is assumed the table 
 </ol>
 
 <p>To scan just selected partitions of a table, a filter describing the desired partitions can be passed to
-InputJobInfo.create.  To scan a single partition, the filter string should look like: "<code>ds=20120401</code>"
+HCatInputFormat.setInput.  To scan a single partition, the filter string should look like: "<code>ds=20120401</code>"
 where the datestamp "<code>ds</code>" is the partition column name and "<code>20120401</code>" is the value
 you want to read (year, month, and day).</p>
 </section>
@@ -420,14 +420,14 @@ you want to read (year, month, and day).
 
 <p>Assume for example you have a web_logs table that is partitioned by the column "<code>ds</code>".  You could select one partition of the table by changing</p>
 <source>
-HCatInputFormat.setInput(job, InputJobInfo.create(dbName, inputTableName, null));
+HCatInputFormat.setInput(job, dbName, inputTableName);
 </source>
 <p>
 to
 </p>
 <source>
 HCatInputFormat.setInput(job,
-                         InputJobInfo.create(dbName, inputTableName, "ds=\"20110924\""));
+                         dbName, inputTableName, "ds=\"20110924\"");
 </source>
 <p>
 This filter must reference only partition columns.  Values from other columns will cause the job to fail.</p>
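
As a further illustration of the filter syntax described above, the fragment below sketches a driver that reads a single "ds" partition of the web_logs table from the documentation example; the class name and surrounding job setup are hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    public class PartitionFilterSketch {
      public static Job createJob(Configuration conf, String dbName) throws Exception {
        Job job = new Job(conf, "ReadOnePartition");

        // Omitting the filter argument (or passing null) reads every partition:
        //   HCatInputFormat.setInput(job, dbName, "web_logs");

        // A filter that references only partition columns reads just the matching partitions.
        HCatInputFormat.setInput(job, dbName, "web_logs", "ds=\"20110924\"");

        job.setInputFormatClass(HCatInputFormat.class);
        return job;
      }
    }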

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java Wed Oct 23 20:50:38 2013
@@ -41,7 +41,6 @@ import org.apache.hcatalog.data.DefaultH
 import org.apache.hcatalog.data.HCatRecord;
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -165,8 +164,7 @@ public class HBaseReadWrite extends Conf
     if (!succ) return 1;
 
     job = new Job(conf, "HBaseRead");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName, tableName,
-      null));
+    HCatInputFormat.setInput(job, dbName, tableName);
 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/GroupByAge.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/GroupByAge.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/GroupByAge.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/GroupByAge.java Wed Oct 23 20:50:38 2013
@@ -38,7 +38,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -105,8 +104,8 @@ public class GroupByAge extends Configur
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "GroupByAge");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, null));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadJson.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadJson.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadJson.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadJson.java Wed Oct 23 20:50:38 2013
@@ -37,7 +37,6 @@ import org.apache.hive.hcatalog.common.H
 import org.apache.hive.hcatalog.data.DefaultHCatRecord;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"
@@ -90,8 +89,8 @@ public class ReadJson extends Configured
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadJson");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadRC.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadRC.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadRC.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadRC.java Wed Oct 23 20:50:38 2013
@@ -37,7 +37,6 @@ import org.apache.hive.hcatalog.common.H
 import org.apache.hive.hcatalog.data.DefaultHCatRecord;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"
@@ -91,8 +90,8 @@ public class ReadRC extends Configured i
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadRC");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadText.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadText.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadText.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadText.java Wed Oct 23 20:50:38 2013
@@ -37,7 +37,6 @@ import org.apache.hive.hcatalog.common.H
 import org.apache.hive.hcatalog.data.DefaultHCatRecord;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"
@@ -102,8 +101,8 @@ public class ReadText extends Configured
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadText");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadWrite.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadWrite.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/ReadWrite.java Wed Oct 23 20:50:38 2013
@@ -36,7 +36,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -86,8 +85,8 @@ public class ReadWrite extends Configure
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadWrite");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, null));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SimpleRead.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SimpleRead.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SimpleRead.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SimpleRead.java Wed Oct 23 20:50:38 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.util.ToolRunner
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"
@@ -87,8 +86,8 @@ public class SimpleRead extends Configur
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "SimpleRead");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName, null);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreComplex.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreComplex.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreComplex.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreComplex.java Wed Oct 23 20:50:38 2013
@@ -36,7 +36,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -103,8 +102,8 @@ public class StoreComplex {
     Job job = new Job(conf, "storecomplex");
     // initialize HCatInputFormat
 
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
       dbName, outputTableName, outputPartitionKvps));

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreDemo.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreDemo.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreDemo.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreDemo.java Wed Oct 23 20:50:38 2013
@@ -35,7 +35,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -114,8 +113,8 @@ public class StoreDemo {
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "storedemo");
     // initialize HCatInputFormat
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
       dbName, outputTableName, outputPartitionKvps));

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreNumbers.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreNumbers.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreNumbers.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/StoreNumbers.java Wed Oct 23 20:50:38 2013
@@ -39,7 +39,6 @@ import org.apache.hive.hcatalog.data.sch
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -178,8 +177,8 @@ public class StoreNumbers {
     Job job = new Job(conf, "storenumbers");
 
     // initialize HCatInputFormat
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
       dbName, outputTableName, outputPartitionKvps));

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SumNumbers.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SumNumbers.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SumNumbers.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/SumNumbers.java Wed Oct 23 20:50:38 2013
@@ -41,7 +41,6 @@ import org.apache.hadoop.util.GenericOpt
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"
@@ -162,8 +161,8 @@ public class SumNumbers {
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "sumnumbers");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-      dbName, tableName, null));
+    HCatInputFormat.setInput(job,
+      dbName, tableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/TypeDataCheck.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/TypeDataCheck.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/TypeDataCheck.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/TypeDataCheck.java Wed Oct 23 20:50:38 2013
@@ -37,7 +37,6 @@ import org.apache.hive.hcatalog.common.H
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat that checks that the columns
@@ -150,8 +149,8 @@ public class TypeDataCheck implements To
       }
       Job job = new Job(conf, "typedatacheck");
       // initialize HCatInputFormat
-      HCatInputFormat.setInput(job, InputJobInfo.create(
-        dbName, tableName, null));
+      HCatInputFormat.setInput(job,
+        dbName, tableName);
       HCatSchema s = HCatInputFormat.getTableSchema(job);
       job.getConfiguration().set(SCHEMA_KEY, schemaStr);
       job.getConfiguration().set(DELIM, outputdelim);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteJson.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteJson.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteJson.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteJson.java Wed Oct 23 20:50:38 2013
@@ -35,7 +35,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -91,8 +90,8 @@ public class WriteJson extends Configure
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "WriteJson");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, null));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteRC.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteRC.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteRC.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteRC.java Wed Oct 23 20:50:38 2013
@@ -35,7 +35,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -93,8 +92,8 @@ public class WriteRC extends Configured 
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "WriteRC");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, null));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteText.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteText.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteText.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteText.java Wed Oct 23 20:50:38 2013
@@ -35,7 +35,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -103,8 +102,8 @@ public class WriteText extends Configure
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "WriteText");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, null));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java Wed Oct 23 20:50:38 2013
@@ -39,7 +39,6 @@ import org.apache.hive.hcatalog.data.sch
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 
 /**
@@ -94,8 +93,8 @@ public class WriteTextPartitioned extend
     if (principalID != null)
       conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "WriteTextPartitioned");
-    HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-      inputTableName, filter));
+    HCatInputFormat.setInput(job, dbName,
+      inputTableName, filter);
     // initialize HCatOutputFormat
 
     job.setInputFormatClass(HCatInputFormat.class);

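WriteTextPartitioned is the one utility in this batch that actually passes a partition filter, so it keeps the four-argument overload. A sketch of that form, assuming an illustrative filter expression (the partition column and value below are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    // Hypothetical example; the filter string is illustrative, not taken from this commit.
    public class HCatFilteredReadSketch {
      public static Job configure(Configuration conf, String dbName, String tableName)
          throws Exception {
        Job job = new Job(conf, "hcat-filtered-read-sketch");
        String filter = "part='p1'";  // assumed partition column/value, for illustration only
        // Passing null here instead (as SimpleRead does) reads the whole table.
        HCatInputFormat.setInput(job, dbName, tableName, filter);
        job.setInputFormatClass(HCatInputFormat.class);
        return job;
      }
    }
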
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java Wed Oct 23 20:50:38 2013
@@ -62,7 +62,6 @@ import org.apache.hive.hcatalog.data.HCa
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.junit.Test;
 
 public class TestHBaseInputFormat extends SkeletonHBaseTest {
@@ -160,9 +159,7 @@ public class TestHBaseInputFormat extend
     MapReadHTable.resetCounters();
 
     job.setInputFormatClass(HCatInputFormat.class);
-    InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
-                null);
-    HCatInputFormat.setInput(job, inputJobInfo);
+    HCatInputFormat.setInput(job, databaseName, tableName);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -225,10 +222,9 @@ public class TestHBaseInputFormat extend
     job.setJarByClass(this.getClass());
     job.setMapperClass(MapReadProjHTable.class);
     job.setInputFormatClass(HCatInputFormat.class);
-    InputJobInfo inputJobInfo = InputJobInfo.create(
-      MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
     HCatInputFormat.setOutputSchema(job, getProjectionSchema());
-    HCatInputFormat.setInput(job, inputJobInfo);
+    HCatInputFormat.setInput(job,
+      MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);

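The HBase test's projection case pairs setOutputSchema with the new setInput call. A sketch of that pairing; the schema is taken as a parameter because its construction (getProjectionSchema in the test) is not shown in this hunk:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    // Hypothetical helper; only the two HCatInputFormat calls come from the hunk above.
    public class HCatProjectedReadSketch {
      public static Job configure(Configuration conf, String dbName, String tableName,
          HCatSchema projection) throws Exception {
        Job job = new Job(conf, "hcat-projected-read-sketch");
        job.setInputFormatClass(HCatInputFormat.class);
        // Restrict the columns handed to the mapper, then bind the input table.
        HCatInputFormat.setOutputSchema(job, projection);
        HCatInputFormat.setInput(job, dbName, tableName);
        return job;
      }
    }
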
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed Oct 23 20:50:38 2013
@@ -1993,6 +1993,7 @@ public class HiveMetaStore extends Thrif
             } else {
               assert (partPath != null);
               wh.deleteDir(partPath, true);
+              deleteParentRecursive(partPath.getParent(), part_vals.size() - 1);
             }
             // ok even if the data is not deleted
           }
@@ -2007,6 +2008,13 @@ public class HiveMetaStore extends Thrif
       return true;
     }
 
+    private void deleteParentRecursive(Path parent, int depth) throws IOException, MetaException {
+      if (depth > 0 && parent != null && wh.isWritable(parent) && wh.isEmpty(parent)) {
+        wh.deleteDir(parent, true);
+        deleteParentRecursive(parent.getParent(), depth - 1);
+      }
+    }
+
     @Override
     public boolean drop_partition(final String db_name, final String tbl_name,
         final List<String> part_vals, final boolean deleteData)

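The metastore change removes a partition's directory and then walks up the partition path, deleting any parent directories that are now empty; the depth limit of part_vals.size() - 1 ensures the table directory itself is never removed. A standalone approximation of that walk-up against plain FileSystem calls (the committed code goes through the Warehouse isWritable/isEmpty/deleteDir helpers instead):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Approximation only: listStatus stands in for Warehouse.isEmpty, and no
    // writability/authorization check is performed here.
    public class EmptyParentCleanupSketch {
      static void deleteEmptyParents(FileSystem fs, Path parent, int depth) throws IOException {
        if (depth <= 0 || parent == null) {
          return;                                  // depth starts at part_vals.size() - 1
        }
        FileStatus[] children = fs.listStatus(parent);
        if (children != null && children.length == 0) {
          fs.delete(parent, true);                 // directory is empty, remove it
          deleteEmptyParents(fs, parent.getParent(), depth - 1);
        }
      }
    }
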
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Wed Oct 23 20:50:38 2013
@@ -39,6 +39,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -222,6 +223,14 @@ public class Warehouse {
     return fsHandler.deleteDir(fs, f, recursive, conf);
   }
 
+  public boolean isEmpty(Path path) throws IOException, MetaException {
+    ContentSummary contents = getFs(path).getContentSummary(path);
+    if (contents != null && contents.getFileCount() == 0 && contents.getDirectoryCount() == 1) {
+      return true;
+    }
+    return false;
+  }
+
   public boolean isWritable(Path path) throws IOException {
     if (!storageAuthCheck) {
       // no checks for non-secure hadoop installations

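The new Warehouse.isEmpty leans on how ContentSummary counts entries: the directory being summarized counts toward getDirectoryCount() itself, so an empty directory reports zero files and exactly one directory. A small probe that prints those counts, assuming a placeholder path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.ContentSummary;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Prints the counts isEmpty() is based on; /tmp/warehouse-check is a placeholder path.
    public class ContentSummaryProbe {
      public static void main(String[] args) throws Exception {
        Path p = new Path(args.length > 0 ? args[0] : "/tmp/warehouse-check");
        FileSystem fs = p.getFileSystem(new Configuration());
        ContentSummary cs = fs.getContentSummary(p);
        // Empty directory: fileCount == 0 and directoryCount == 1 (the directory itself).
        System.out.println("files=" + cs.getFileCount() + " dirs=" + cs.getDirectoryCount());
      }
    }
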
Modified: hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java (original)
+++ hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java Wed Oct 23 20:50:38 2013
@@ -86,12 +86,13 @@ public class TestMetastoreVersion extend
     assertFalse(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_AUTO_CREATE_SCHEMA));
     assertTrue(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_FIXED_DATASTORE));
 
-    SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
-    // driver execution should fail since the schema didn't get created
-    CommandProcessorResponse proc = driver.run("show tables");
-    assertFalse(proc.getResponseCode() == 0);
-   }
+    // session creation should fail since the schema didn't get created
+    try {
+      SessionState.start(new CliSessionState(hiveConf));
+    } catch (RuntimeException re) {
+      assertTrue(re.getCause().getCause() instanceof MetaException);
+    }
+  }
 
   /***
    * Test that with no verification, hive populates the schema and version correctly

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt Wed Oct 23 20:50:38 2013
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template ColumnArithmeticColumn.txt, which covers binary arithmetic 
@@ -154,4 +155,18 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+  
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
 }

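Each expression template now emits a getDescriptor() override so the VectorExpressionDescriptor machinery added in this revision can match a candidate expression by mode, argument count, argument types, and input kinds (column vs. scalar); the scalar, unary, and filter templates that follow vary only in those settings. As an illustration, the method above would presumably expand as follows for a generated long/long column-column expression, with <OperandType1>/<OperandType2> substituted by "long" (expanded method only, enclosing generated class omitted):

      @Override
      public VectorExpressionDescriptor.Descriptor getDescriptor() {
        return (new VectorExpressionDescriptor.Builder())
            .setMode(VectorExpressionDescriptor.Mode.PROJECTION)   // produces an output column
            .setNumArguments(2)
            .setArgumentTypes(
                VectorExpressionDescriptor.ArgumentType.getType("long"),
                VectorExpressionDescriptor.ArgumentType.getType("long"))
            .setInputExpressionTypes(
                VectorExpressionDescriptor.InputExpressionType.COLUMN,
                VectorExpressionDescriptor.InputExpressionType.COLUMN)
            .build();
      }
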
Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalar.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalar.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalar.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalar.txt Wed Oct 23 20:50:38 2013
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template ColumnArithmeticScalar.txt, which covers binary arithmetic 
@@ -131,4 +132,18 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
 }

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareColumn.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareColumn.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareColumn.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareColumn.txt Wed Oct 23 20:50:38 2013
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template ColumnArithmeticColumn.txt, which covers binary arithmetic 
@@ -154,4 +155,18 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
 }

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareScalar.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareScalar.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareScalar.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnCompareScalar.txt Wed Oct 23 20:50:38 2013
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
 import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template ColumnCompareScalar.txt, which covers binary comparison 
@@ -146,4 +147,18 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
 }

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryFunc.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryFunc.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryFunc.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryFunc.txt Wed Oct 23 20:50:38 2013
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 public class <ClassName> extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -119,4 +120,16 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
 }

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt Wed Oct 23 20:50:38 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.v
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template ColumnUnaryMinus.txt, which covers unary negation operator. 
@@ -121,4 +122,16 @@ public class <ClassName> extends VectorE
   public void setOutputColumn(int outputColumn) {
     this.outputColumn = outputColumn;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
 }

Modified: hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareColumn.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareColumn.txt?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareColumn.txt (original)
+++ hive/branches/tez/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnCompareColumn.txt Wed Oct 23 20:50:38 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.v
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
  * Generated from template FilterColumnCompareColumn.txt, which covers binary comparison 
@@ -251,4 +252,18 @@ public class <ClassName> extends VectorE
   public void setColNum2(int colNum2) {
     this.colNum2 = colNum2;
   }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
 }
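
The filter templates register themselves the same way but with Mode.FILTER, reflecting that they prune rows of the batch in place rather than producing an output column. Under the same substitution as the projection example earlier, the generated descriptor would presumably differ only in the mode:

      @Override
      public VectorExpressionDescriptor.Descriptor getDescriptor() {
        return (new VectorExpressionDescriptor.Builder())
            .setMode(VectorExpressionDescriptor.Mode.FILTER)       // row-pruning, no output column
            .setNumArguments(2)
            .setArgumentTypes(
                VectorExpressionDescriptor.ArgumentType.getType("long"),
                VectorExpressionDescriptor.ArgumentType.getType("long"))
            .setInputExpressionTypes(
                VectorExpressionDescriptor.InputExpressionType.COLUMN,
                VectorExpressionDescriptor.InputExpressionType.COLUMN)
            .build();
      }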