You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2013/10/29 18:25:59 UTC

svn commit: r1536823 [1/8] - in /hive/branches/tez: ./ cli/src/test/org/apache/hadoop/hive/cli/ common/src/java/conf/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/h...

Author: gunther
Date: Tue Oct 29 17:25:55 2013
New Revision: 1536823

URL: http://svn.apache.org/r1536823
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Added:
    hive/branches/tez/common/src/java/org/apache/hive/common/util/HiveTestUtils.java
      - copied unchanged from r1536810, hive/trunk/common/src/java/org/apache/hive/common/util/HiveTestUtils.java
    hive/branches/tez/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
      - copied unchanged from r1536810, hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/branches/tez/data/files/kv9.txt
      - copied unchanged from r1536810, hive/trunk/data/files/kv9.txt
    hive/branches/tez/hcatalog/webhcat/svr/src/main/config/override-container-log4j.properties
      - copied unchanged from r1536810, hive/trunk/hcatalog/webhcat/svr/src/main/config/override-container-log4j.properties
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
      - copied unchanged from r1536810, hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
      - copied unchanged from r1536810, hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFOPMod.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFOPMod.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFPosMod.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFPosMod.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFBridge.java
      - copied unchanged from r1536810, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFBridge.java
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_5.q
      - copied unchanged from r1536810, hive/trunk/ql/src/test/queries/clientpositive/decimal_5.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_6.q
      - copied unchanged from r1536810, hive/trunk/ql/src/test/queries/clientpositive/decimal_6.q
    hive/branches/tez/ql/src/test/queries/clientpositive/join_merging.q
      - copied unchanged from r1536810, hive/trunk/ql/src/test/queries/clientpositive/join_merging.q
    hive/branches/tez/ql/src/test/queries/clientpositive/partition_varchar2.q
      - copied unchanged from r1536810, hive/trunk/ql/src/test/queries/clientpositive/partition_varchar2.q
    hive/branches/tez/ql/src/test/queries/clientpositive/vectorized_shufflejoin.q
      - copied unchanged from r1536810, hive/trunk/ql/src/test/queries/clientpositive/vectorized_shufflejoin.q
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_5.q.out
      - copied unchanged from r1536810, hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_6.q.out
      - copied unchanged from r1536810, hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join_merging.q.out
      - copied unchanged from r1536810, hive/trunk/ql/src/test/results/clientpositive/join_merging.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/partition_varchar2.q.out
      - copied unchanged from r1536810, hive/trunk/ql/src/test/results/clientpositive/partition_varchar2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
      - copied unchanged from r1536810, hive/trunk/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
      - copied unchanged from r1536810, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
      - copied unchanged from r1536810, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
    hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java
      - copied unchanged from r1536810, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java
    hive/branches/tez/service/src/test/org/apache/hive/service/cli/TestScratchDir.java
      - copied unchanged from r1536810, hive/trunk/service/src/test/org/apache/hive/service/cli/TestScratchDir.java
Removed:
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/DynamicMultiDimeCollectionTest.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPrunnerTest.java
    hive/branches/tez/ql/src/test/queries/clientnegative/script_broken_pipe1.q
    hive/branches/tez/ql/src/test/results/clientnegative/script_broken_pipe1.q.out
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/.gitignore
    hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
    hive/branches/tez/common/src/java/conf/hive-log4j.properties
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
    hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
    hive/branches/tez/common/src/test/resources/hive-exec-log4j-test.properties
    hive/branches/tez/common/src/test/resources/hive-log4j-test.properties
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
    hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt
    hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
    hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
    hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
    hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HiveJobIDParser.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JarJobIDParser.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/PigJobIDParser.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
    hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTrivialExecService.java
    hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
    hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
    hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
    hive/branches/tez/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/branches/tez/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
    hive/branches/tez/ql/src/java/conf/hive-exec-log4j.properties
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataPrettyFormatUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_1.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_2.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_3.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_4.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_join.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_precision.q
    hive/branches/tez/ql/src/test/queries/clientpositive/decimal_udf.q
    hive/branches/tez/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q
    hive/branches/tez/ql/src/test/queries/clientpositive/ptf_decimal.q
    hive/branches/tez/ql/src/test/queries/clientpositive/serde_regex.q
    hive/branches/tez/ql/src/test/queries/clientpositive/udf_pmod.q
    hive/branches/tez/ql/src/test/queries/clientpositive/udf_to_double.q
    hive/branches/tez/ql/src/test/queries/clientpositive/udf_to_float.q
    hive/branches/tez/ql/src/test/queries/clientpositive/udf_to_string.q
    hive/branches/tez/ql/src/test/queries/clientpositive/windowing_expressions.q
    hive/branches/tez/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
    hive/branches/tez/ql/src/test/queries/clientpositive/windowing_navfn.q
    hive/branches/tez/ql/src/test/queries/clientpositive/windowing_ntile.q
    hive/branches/tez/ql/src/test/queries/clientpositive/windowing_rank.q
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/wrong_column_type.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_join.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_precision.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_serde.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/literal_decimal.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ptf_decimal.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/serde_regex.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf7.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_pmod.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_to_double.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_to_float.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_to_string.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/vectorization_14.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing_expressions.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing_multipartitioning.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing_navfn.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing_ntile.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing_rank.q.out
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveDecimalObjectInspector.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIService.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
    hive/branches/tez/service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java
    hive/branches/tez/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
    hive/branches/tez/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
    hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
    hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1535641-1536810

Modified: hive/branches/tez/.gitignore
URL: http://svn.apache.org/viewvc/hive/branches/tez/.gitignore?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/.gitignore (original)
+++ hive/branches/tez/.gitignore Tue Oct 29 17:25:55 2013
@@ -13,7 +13,8 @@ common/src/gen
 *.iml
 *.ipr
 *.iws
-ql/derby.log
 derby.log
+datanucleus.log
 .arc
-ql/TempStatsStore
+TempStatsStore/
+target/

Modified: hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java (original)
+++ hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java Tue Oct 29 17:25:55 2013
@@ -156,23 +156,22 @@ public class TestCliDriverMethods extend
       historyFile.delete();
     }
     HiveConf configuration = new HiveConf();
-    CliSessionState ss = new CliSessionState(configuration);
-    CliSessionState.start(ss);
-    String[] args = {};
+    configuration.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
     PrintStream oldOut = System.out;
     ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
     System.setOut(new PrintStream(dataOut));
-
     PrintStream oldErr = System.err;
     ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
     System.setErr(new PrintStream(dataErr));
-
+    CliSessionState ss = new CliSessionState(configuration);
+    CliSessionState.start(ss);
+    String[] args = {};
 
     try {
       new FakeCliDriver().run(args);
-      assertTrue(dataOut.toString().contains("test message"));
-      assertTrue(dataErr.toString().contains("Hive history file="));
-      assertTrue(dataErr.toString().contains("File: fakeFile is not a file."));
+      assertTrue(dataOut.toString(), dataOut.toString().contains("test message"));
+      assertTrue(dataErr.toString(), dataErr.toString().contains("Hive history file="));
+      assertTrue(dataErr.toString(), dataErr.toString().contains("File: fakeFile is not a file."));
       dataOut.reset();
       dataErr.reset();
 

Modified: hive/branches/tez/common/src/java/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/conf/hive-log4j.properties?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/conf/hive-log4j.properties (original)
+++ hive/branches/tez/common/src/java/conf/hive-log4j.properties Tue Oct 29 17:25:55 2013
@@ -17,7 +17,7 @@
 # Define some default values that can be overridden by system properties
 hive.log.threshold=ALL
 hive.root.logger=INFO,DRFA
-hive.log.dir=/tmp/${user.name}
+hive.log.dir=${java.io.tmpdir}/${user.name}
 hive.log.file=hive.log
 
 # Define the root logger to the system property "hadoop.root.logger".

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/LogUtils.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/LogUtils.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/LogUtils.java Tue Oct 29 17:25:55 2013
@@ -89,8 +89,11 @@ public class LogUtils {
         // property speficied file found in local file system
         // use the specified file
         if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
-          System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
-            HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+          String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
+          if(queryId == null || (queryId = queryId.trim()).isEmpty()) {
+            queryId = "unknown-" + System.currentTimeMillis();
+          }
+          System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
         }
         LogManager.resetConfiguration();
         PropertyConfigurator.configure(log4jFileName);

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Tue Oct 29 17:25:55 2013
@@ -28,17 +28,16 @@ import java.math.RoundingMode;
  *
  */
 public class HiveDecimal implements Comparable<HiveDecimal> {
+  public static final int MAX_PRECISION = 65;
+  public static final int MAX_SCALE = 30;
+  public static final int DEFAULT_PRECISION = 10;
+  public static final int DEFAULT_SCALE = 0;
 
   public static final HiveDecimal ZERO = new HiveDecimal(BigDecimal.ZERO);
-
-  public static final int MAX_PRECISION = 38; // fits into 128 bits
-
   public static final HiveDecimal ONE = new HiveDecimal(BigDecimal.ONE);
 
   public static final int ROUND_FLOOR = BigDecimal.ROUND_FLOOR;
-
   public static final int ROUND_CEILING = BigDecimal.ROUND_CEILING;
-
   public static final int ROUND_HALF_UP = BigDecimal.ROUND_HALF_UP;
 
   private BigDecimal bd = BigDecimal.ZERO;
@@ -48,16 +47,16 @@ public class HiveDecimal implements Comp
   }
 
   public static HiveDecimal create(BigDecimal b) {
-    return create(b, false);
+    return create(b, true);
   }
 
   public static HiveDecimal create(BigDecimal b, boolean allowRounding) {
-    BigDecimal bd = normalize(b, HiveDecimal.MAX_PRECISION, allowRounding);
+    BigDecimal bd = normalize(b, allowRounding);
     return bd == null ? null : new HiveDecimal(bd);
   }
 
   public static HiveDecimal create(BigInteger unscaled, int scale) {
-    BigDecimal bd = normalize(new BigDecimal(unscaled, scale), HiveDecimal.MAX_PRECISION, false);
+    BigDecimal bd = normalize(new BigDecimal(unscaled, scale), true);
     return bd == null ? null : new HiveDecimal(bd);
   }
 
@@ -69,12 +68,12 @@ public class HiveDecimal implements Comp
       return null;
     }
 
-    bd = normalize(bd, HiveDecimal.MAX_PRECISION, false);
+    bd = normalize(bd, true);
     return bd == null ? null : new HiveDecimal(bd);
   }
 
   public static HiveDecimal create(BigInteger bi) {
-    BigDecimal bd = normalize(new BigDecimal(bi), HiveDecimal.MAX_PRECISION, false);
+    BigDecimal bd = normalize(new BigDecimal(bi), true);
     return bd == null ? null : new HiveDecimal(bd);
   }
 
@@ -92,7 +91,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal setScale(int i) {
-    return new HiveDecimal(bd.setScale(i));
+    return new HiveDecimal(bd.setScale(i, RoundingMode.HALF_UP));
   }
 
   @Override
@@ -158,7 +157,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal multiply(HiveDecimal dec) {
-    return create(bd.multiply(dec.bd));
+    return create(bd.multiply(dec.bd), false);
   }
 
   public BigInteger unscaledValue() {
@@ -182,7 +181,8 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal pow(int n) {
-    return create(bd.pow(n));
+    BigDecimal result = normalize(bd.pow(n), false);
+    return result == null ? null : new HiveDecimal(result);
   }
 
   public HiveDecimal remainder(HiveDecimal dec) {
@@ -190,7 +190,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal divide(HiveDecimal dec) {
-    return create(bd.divide(dec.bd, MAX_PRECISION, RoundingMode.HALF_UP), true);
+    return create(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP), true);
   }
 
   private static BigDecimal trim(BigDecimal d) {
@@ -207,31 +207,45 @@ public class HiveDecimal implements Comp
     return d;
   }
 
-  private static BigDecimal normalize(BigDecimal d, int precision, boolean allowRounding) {
-    if (d == null) {
+  private static BigDecimal normalize(BigDecimal bd, boolean allowRounding) {
+    if (bd == null) {
       return null;
     }
 
-    d = trim(d);
+    bd = trim(bd);
 
-    // compute the number of digits of the decimal
-    int valuePrecision = d.precision()
-        + Math.max(0, 1 + d.scale() - d.precision());
-
-    if (valuePrecision > precision) {
-      if (allowRounding) {
-        // round "half up" until we hit the decimal point
-        int adjustedScale = d.scale() - (valuePrecision-precision);
-        if (adjustedScale >= 0) {
-          d = d.setScale(adjustedScale, RoundingMode.HALF_UP);
-          d = trim(d);
-        } else {
-          d = null;
-        }
-      } else {
-        d = null;
-      }
+    int intDigits = bd.precision() - bd.scale();
+
+    if (intDigits > MAX_PRECISION) {
+      return null;
     }
-    return d;
+
+    int maxScale = Math.min(MAX_SCALE, Math.min(MAX_PRECISION - intDigits, bd.scale()));
+    if (bd.scale() > maxScale ) {
+      bd = allowRounding ? bd.setScale(maxScale, RoundingMode.HALF_UP) : null;
+    }
+
+    return bd;
   }
+
+  public static BigDecimal enforcePrecisionScale(BigDecimal bd, int maxPrecision, int maxScale) {
+    if (bd == null) {
+      return null;
+    }
+
+    bd = trim(bd);
+
+    int maxIntDigits = maxPrecision - maxScale;
+    int intDigits = bd.precision() - bd.scale();
+    if (intDigits > maxIntDigits) {
+      return null;
+    }
+
+    if (bd.scale() > maxScale) {
+      bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
+    }
+
+    return bd;
+  }
+
 }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Oct 29 17:25:55 2013
@@ -150,16 +150,6 @@ public class HiveConf extends Configurat
   };
 
   /**
-   * The conf variables that depends on current user
-   */
-  public static final HiveConf.ConfVars[] userVars = {
-    HiveConf.ConfVars.SCRATCHDIR,
-    HiveConf.ConfVars.LOCALSCRATCHDIR,
-    HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR,
-    HiveConf.ConfVars.HIVEHISTORYFILELOC
-  };
-
-  /**
    * ConfVars.
    *
    * These are the default configuration properties for Hive. Each HiveConf
@@ -763,7 +753,7 @@ public class HiveConf extends Configurat
     // Number of async threads
     HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 50),
     // Number of seconds HiveServer2 shutdown will wait for async threads to terminate
-    HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10),
+    HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10L),
 
 
     // HiveServer2 auth configuration
@@ -988,12 +978,12 @@ public class HiveConf extends Configurat
   }
 
   public static int getIntVar(Configuration conf, ConfVars var) {
-    assert (var.valClass == Integer.class);
+    assert (var.valClass == Integer.class) : var.varname;
     return conf.getInt(var.varname, var.defaultIntVal);
   }
 
   public static void setIntVar(Configuration conf, ConfVars var, int val) {
-    assert (var.valClass == Integer.class);
+    assert (var.valClass == Integer.class) : var.varname;
     conf.setInt(var.varname, val);
   }
 
@@ -1006,7 +996,7 @@ public class HiveConf extends Configurat
   }
 
   public static long getLongVar(Configuration conf, ConfVars var) {
-    assert (var.valClass == Long.class);
+    assert (var.valClass == Long.class) : var.varname;
     return conf.getLong(var.varname, var.defaultLongVal);
   }
 
@@ -1015,7 +1005,7 @@ public class HiveConf extends Configurat
   }
 
   public static void setLongVar(Configuration conf, ConfVars var, long val) {
-    assert (var.valClass == Long.class);
+    assert (var.valClass == Long.class) : var.varname;
     conf.setLong(var.varname, val);
   }
 
@@ -1028,7 +1018,7 @@ public class HiveConf extends Configurat
   }
 
   public static float getFloatVar(Configuration conf, ConfVars var) {
-    assert (var.valClass == Float.class);
+    assert (var.valClass == Float.class) : var.varname;
     return conf.getFloat(var.varname, var.defaultFloatVal);
   }
 
@@ -1037,7 +1027,7 @@ public class HiveConf extends Configurat
   }
 
   public static void setFloatVar(Configuration conf, ConfVars var, float val) {
-    assert (var.valClass == Float.class);
+    assert (var.valClass == Float.class) : var.varname;
     ShimLoader.getHadoopShims().setFloatConf(conf, var.varname, val);
   }
 
@@ -1050,7 +1040,7 @@ public class HiveConf extends Configurat
   }
 
   public static boolean getBoolVar(Configuration conf, ConfVars var) {
-    assert (var.valClass == Boolean.class);
+    assert (var.valClass == Boolean.class) : var.varname;
     return conf.getBoolean(var.varname, var.defaultBoolVal);
   }
 
@@ -1059,7 +1049,7 @@ public class HiveConf extends Configurat
   }
 
   public static void setBoolVar(Configuration conf, ConfVars var, boolean val) {
-    assert (var.valClass == Boolean.class);
+    assert (var.valClass == Boolean.class) : var.varname;
     conf.setBoolean(var.varname, val);
   }
 
@@ -1072,7 +1062,7 @@ public class HiveConf extends Configurat
   }
 
   public static String getVar(Configuration conf, ConfVars var) {
-    assert (var.valClass == String.class);
+    assert (var.valClass == String.class) : var.varname;
     return conf.get(var.varname, var.defaultVal);
   }
 
@@ -1081,7 +1071,7 @@ public class HiveConf extends Configurat
   }
 
   public static void setVar(Configuration conf, ConfVars var, String val) {
-    assert (var.valClass == String.class);
+    assert (var.valClass == String.class) : var.varname;
     conf.set(var.varname, val);
   }
 

Modified: hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java (original)
+++ hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java Tue Oct 29 17:25:55 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.conf;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.common.util.HiveTestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -33,8 +34,7 @@ import org.junit.Test;
 public class TestHiveConf {
   @Test
   public void testHiveSitePath() throws Exception {
-    String expectedPath =
-        new Path(System.getProperty("test.build.resources") + "/hive-site.xml").toUri().getPath();
+    String expectedPath = HiveTestUtils.getFileFromClasspath("hive-site.xml");
     Assert.assertEquals(expectedPath, new HiveConf().getHiveSiteLocation().getPath());
   }
 

Modified: hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java (original)
+++ hive/branches/tez/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java Tue Oct 29 17:25:55 2013
@@ -24,7 +24,8 @@ import java.io.InputStreamReader;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.LogUtils;
-import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.common.util.HiveTestUtils;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 
 /**
@@ -43,53 +44,34 @@ public class TestHiveLogging extends Tes
     process = null;
   }
 
-  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) {
-    System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname,
-      System.getProperty("test.build.resources") + "/" + hiveLog4jTest);
-    System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname,
-      System.getProperty("test.build.resources") + "/" + hiveExecLog4jTest);
-
-    String expectedLog4jPath = System.getProperty("test.build.resources")
-      + "/" + hiveLog4jTest;
-    String expectedLog4jExecPath = System.getProperty("test.build.resources")
-      + "/" + hiveExecLog4jTest;
-
-    try {
-      LogUtils.initHiveLog4j();
-    } catch (LogInitializationException e) {
-    }
+  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) 
+  throws Exception {
+    String expectedLog4jTestPath = HiveTestUtils.getFileFromClasspath(hiveLog4jTest);
+    String expectedLog4jExecPath = HiveTestUtils.getFileFromClasspath(hiveExecLog4jTest);
+    System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname, expectedLog4jTestPath);
+    System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname, expectedLog4jExecPath);
+
+    LogUtils.initHiveLog4j();
 
     HiveConf conf = new HiveConf();
-    assertEquals(expectedLog4jPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE));
+    assertEquals(expectedLog4jTestPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE));
     assertEquals(expectedLog4jExecPath, conf.getVar(ConfVars.HIVE_EXEC_LOG4J_FILE));
   }
 
-  private void runCmd(String cmd) {
-    try {
-      process = runTime.exec(cmd);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    try {
-      process.waitFor();
-    } catch (InterruptedException e) {
-      e.printStackTrace();
-    }
+  private void runCmd(String cmd) throws Exception {
+    process = runTime.exec(cmd);
+    process.waitFor();
   }
 
-  private void getCmdOutput(String logFile) {
+  private void getCmdOutput(String logFile) throws Exception {
     boolean logCreated = false;
     BufferedReader buf = new BufferedReader(
       new InputStreamReader(process.getInputStream()));
     String line = "";
-    try {
-      while((line = buf.readLine()) != null) {
-        if (line.equals(logFile)) {
-          logCreated = true;
-        }
+    while((line = buf.readLine()) != null) {
+      if (line.equals(logFile)) {
+        logCreated = true;
       }
-    } catch (IOException e) {
-      e.printStackTrace();
     }
     assertEquals(true, logCreated);
   }
@@ -112,12 +94,12 @@ public class TestHiveLogging extends Tes
   }
 
   public void testHiveLogging() throws Exception {
-    // customized log4j config log file to be: /tmp/hiveLog4jTest.log
-    String customLogPath = "/tmp/";
+    // customized log4j config log file to be: /tmp/TestHiveLogging/hiveLog4jTest.log
+    String customLogPath = "/tmp/" + System.getProperty("user.name") + "-TestHiveLogging/";
     String customLogName = "hiveLog4jTest.log";
     String customLogFile = customLogPath + customLogName;
     String customCleanCmd = "rm -rf " + customLogFile;
-    String customFindCmd = "find /tmp -name " + customLogName;
+    String customFindCmd = "find " + customLogPath + " -name " + customLogName;
     RunTest(customCleanCmd, customFindCmd, customLogFile,
       "hive-log4j-test.properties", "hive-exec-log4j-test.properties");
   }

Modified: hive/branches/tez/common/src/test/resources/hive-exec-log4j-test.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/test/resources/hive-exec-log4j-test.properties?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/test/resources/hive-exec-log4j-test.properties (original)
+++ hive/branches/tez/common/src/test/resources/hive-exec-log4j-test.properties Tue Oct 29 17:25:55 2013
@@ -1,6 +1,6 @@
 # Define some default values that can be overridden by system properties
 hive.root.logger=INFO,FA
-hive.log.dir=/tmp
+hive.log.dir=/tmp/${user.name}-TestHiveLogging
 hive.log.file=hiveExecLog4jTest.log
 
 # Define the root logger to the system property "hadoop.root.logger".

Modified: hive/branches/tez/common/src/test/resources/hive-log4j-test.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/test/resources/hive-log4j-test.properties?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/common/src/test/resources/hive-log4j-test.properties (original)
+++ hive/branches/tez/common/src/test/resources/hive-log4j-test.properties Tue Oct 29 17:25:55 2013
@@ -1,6 +1,6 @@
 # Define some default values that can be overridden by system properties
 hive.root.logger=WARN,DRFA
-hive.log.dir=/tmp
+hive.log.dir=/tmp/${user.name}-TestHiveLogging
 hive.log.file=hiveLog4jTest.log
 
 # Define the root logger to the system property "hadoop.root.logger".

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java Tue Oct 29 17:25:55 2013
@@ -585,13 +585,6 @@ class FileOutputCommitterContainer exten
         }
       }
 
-      //      for (Entry<String,Map<String,String>> spec : partitionsDiscoveredByPath.entrySet()){
-      //        LOG.info("Partition "+ spec.getKey());
-      //        for (Entry<String,String> e : spec.getValue().entrySet()){
-      //          LOG.info(e.getKey() + "=>" +e.getValue());
-      //        }
-      //      }
-
       this.partitionsDiscovered = true;
     }
   }
@@ -652,7 +645,6 @@ class FileOutputCommitterContainer exten
       for(Partition ptn : partitionsToAdd){
         ptnInfos.add(InternalUtil.createPtnKeyValueMap(new Table(tableInfo.getTable()), ptn));
       }
-
       //Publish the new partition(s)
       if (dynamicPartitioningUsed && harProcessor.isEnabled() && (!partitionsToAdd.isEmpty())){
 
@@ -678,7 +670,7 @@ class FileOutputCommitterContainer exten
           throw e;
         }
 
-      }else{
+      } else {
         // no harProcessor, regular operation
         updateTableSchema(client, table, jobInfo.getOutputSchema());
         LOG.info("HAR not is not being used. The table {} has new partitions {}.", table.getTableName(), ptnInfos);

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java Tue Oct 29 17:25:55 2013
@@ -51,7 +51,7 @@ import org.junit.Test;
  */
 public class TestPassProperties {
   private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-      "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
+      "/build/test/data/" + TestPassProperties.class.getCanonicalName();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
@@ -96,7 +96,7 @@ public class TestPassProperties {
       conf.set("hive.metastore.uris", "thrift://no.such.machine:10888");
       conf.set("hive.metastore.local", "false");
       Job job = new Job(conf, "Write-hcat-seq-table");
-      job.setJarByClass(TestSequenceFileReadWrite.class);
+      job.setJarByClass(TestPassProperties.class);
 
       job.setMapperClass(Map.class);
       job.setOutputKeyClass(NullWritable.class);

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java Tue Oct 29 17:25:55 2013
@@ -48,7 +48,7 @@ import org.junit.Test;
 
 public class TestPassProperties {
   private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-      "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
+      "/build/test/data/" + TestPassProperties.class.getCanonicalName();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
@@ -93,7 +93,7 @@ public class TestPassProperties {
       conf.set("hive.metastore.uris", "thrift://no.such.machine:10888");
       conf.set("hive.metastore.local", "false");
       Job job = new Job(conf, "Write-hcat-seq-table");
-      job.setJarByClass(TestSequenceFileReadWrite.class);
+      job.setJarByClass(TestPassProperties.class);
 
       job.setMapperClass(Map.class);
       job.setOutputKeyClass(NullWritable.class);

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt Tue Oct 29 17:25:55 2013
@@ -141,8 +141,10 @@ In order for this test suite to work, we
 and webhcat.proxyuser.hue.hosts defined, i.e. 'hue' should be allowed to impersonate 'joe'.
 [Of course, 'hcat' proxyuser should be configured in core-site.xml for the command to succeed.]
 
-Furthermore, metastore side file based security should be enabled.  To do this 3 properties in
-hive-site.xml should be configured:
+Furthermore, metastore-side file-based security should be enabled.
+(See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Authorization#LanguageManualAuthorization-MetastoreServerSecurity for more info.)
+
+To do this, 3 properties in hive-site.xml should be configured:
 1) hive.security.metastore.authorization.manager set to 
     org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
 2) hive.security.metastore.authenticator.manager set to 

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Tue Oct 29 17:25:55 2013
@@ -788,7 +788,8 @@ sub compare
 
     if ( (defined $testCmd->{'check_job_created'})
          || (defined $testCmd->{'check_job_complete'})
-         || (defined $testCmd->{'check_job_exit_value'}) ) {    
+         || (defined $testCmd->{'check_job_exit_value'})
+         || (defined $testCmd->{'check_job_percent_complete'}) ) {    
       my $jobid = $json_hash->{'id'};
       if (!defined $jobid) {
         print $log "$0::$subName WARN check failed: " 
@@ -803,7 +804,8 @@ sub compare
             . "jobresult not defined ";
           $result = 0;
         }
-        if (defined($testCmd->{'check_job_complete'}) || defined($testCmd->{'check_job_exit_value'})) {
+        if (defined($testCmd->{'check_job_complete'}) || defined($testCmd->{'check_job_exit_value'})
+            || defined($testCmd->{'check_job_percent_complete'})) {
           my $jobComplete;
           my $NUM_RETRIES = 60;
           my $SLEEP_BETWEEN_RETRIES = 5;
@@ -841,6 +843,15 @@ sub compare
                 $result = 0;
               }
             }
+            # check the percentComplete value
+            if (defined($testCmd->{'check_job_percent_complete'})) {
+              my $pcValue = $res_hash->{'percentComplete'};
+              my $expectedPercentComplete = $testCmd->{'check_job_percent_complete'};
+              if ( (!defined $pcValue) || $pcValue ne $expectedPercentComplete ) {
+                print $log "check_job_percent_complete failed. got percentComplete $pcValue,  expected  $expectedPercentComplete";
+                $result = 0;
+              }
+            }
           }
 
 	  #Check userargs

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Tue Oct 29 17:25:55 2013
@@ -73,6 +73,7 @@ $cfg = 
      'status_code' => 200,
      'check_job_created' => 1,
      'check_job_complete' => 'SUCCESS',
+     'check_job_percent_complete' => 'map 100% reduce 100%',
      'check_job_exit_value' => 0,
      'check_call_back' => 1,
     },
@@ -166,6 +167,7 @@ $cfg = 
      'status_code' => 200,
      'check_job_created' => 1,
      'check_job_complete' => 'SUCCESS', 
+     'check_job_percent_complete' => '100% complete',
      'check_job_exit_value' => 0,
      'check_call_back' => 1,
     },
@@ -386,6 +388,7 @@ $cfg = 
      'status_code' => 200,
      'check_job_created' => 1,
      'check_job_complete' => 'SUCCESS', 
+     'check_job_percent_complete' => 'map 100% reduce 100%',
      'check_job_exit_value' => 0,
      'check_call_back' => 1,
 

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf Tue Oct 29 17:25:55 2013
@@ -54,7 +54,9 @@ $cfg = 
     },
     {
      #-ve test - no input file
-     'num' => 2,
+     #The TempletonController job status should be SUCCESS, but the exit value should be 1.
+     #If the yarn log is redirected to stderr, check_job_complete is FAILURE; if not, SUCCESS (HIVE-5511).
+     'num' => 2,
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/streaming',
      'post_options' => ['user.name=:UNAME:','input=:INPDIR_HDFS:/nums.txt','input=:INPDIR_HDFS:/nums.txt','output=:OUTDIR:/mycounts', 

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh Tue Oct 29 17:25:55 2013
@@ -75,7 +75,7 @@ elif [ -e "${WEBHCAT_PREFIX}/conf/webhca
 else
   DEFAULT_CONF_DIR="/etc/webhcat"
 fi
-WEBHCAT_CONF_DIR="${WEBHCAT_CONF_DIR:-$DEFAULT_CONF_DIR}"
+export WEBHCAT_CONF_DIR="${WEBHCAT_CONF_DIR:-$DEFAULT_CONF_DIR}"
 
 #users can add various env vars to webhcat-env.sh in the conf
 #rather than having to export them before running the command

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java Tue Oct 29 17:25:55 2013
@@ -71,6 +71,7 @@ public class AppConfig extends Configura
   };
 
   public static final String TEMPLETON_HOME_VAR = "TEMPLETON_HOME";
+  public static final String WEBHCAT_CONF_DIR = "WEBHCAT_CONF_DIR";
 
   public static final String[] TEMPLETON_CONF_FILENAMES = {
     "webhcat-default.xml",
@@ -153,6 +154,9 @@ public class AppConfig extends Configura
   public static String getTempletonDir() {
     return System.getenv(TEMPLETON_HOME_VAR);
   }
+  public static String getWebhcatConfDir() {
+    return System.getenv(WEBHCAT_CONF_DIR);
+  }
 
   private boolean loadOneFileConfig(String dir, String fname) {
     if (dir != null) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java Tue Oct 29 17:25:55 2013
@@ -68,7 +68,7 @@ public class CompleteDelegator extends T
     try {
       state = new JobState(id, Main.getAppConfigInstance());
       if (state.getCompleteStatus() == null)
-        failed("Job not yet complete. jobId=" + id + " Status from JT=" + jobStatus, null);
+        failed("Job not yet complete. jobId=" + id + " Status from JobTracker=" + jobStatus, null);
 
       Long notified = state.getNotifiedTime();
       if (notified != null) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java Tue Oct 29 17:25:55 2013
@@ -91,6 +91,12 @@ public class HDFSStorage implements Temp
     BufferedReader in = null;
     Path p = new Path(getPath(type) + "/" + id + "/" + key);
     try {
+      if(!fs.exists(p)) {
+        //check first, otherwise webhcat.log is full of stack traces from FileSystem when
+        //clients check for status ('exitValue', 'completed', etc.)
+        LOG.debug(p + " does not exist.");
+        return null;
+      }
       in = new BufferedReader(new InputStreamReader(fs.open(p)));
       String line = null;
       String val = "";
@@ -102,9 +108,7 @@ public class HDFSStorage implements Temp
       }
       return val;
     } catch (Exception e) {
-      //don't print stack trace since clients poll for 'exitValue', 'completed',
-      //files which are not there until job completes
-      LOG.info("Couldn't find " + p + ": " + e.getMessage());
+      LOG.error("Couldn't find " + p + ": " + e.getMessage(), e);
     } finally {
       close(in);
     }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HiveJobIDParser.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HiveJobIDParser.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HiveJobIDParser.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HiveJobIDParser.java Tue Oct 29 17:25:55 2013
@@ -32,6 +32,6 @@ class HiveJobIDParser extends JobIDParse
 
   @Override
   List<String> parseJobID() throws IOException {
-    return parseJobID(TempletonControllerJob.STDERR_FNAME, jobidPattern);
+    return parseJobID(JobSubmissionConstants.STDERR_FNAME, jobidPattern);
   }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JarJobIDParser.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JarJobIDParser.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JarJobIDParser.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JarJobIDParser.java Tue Oct 29 17:25:55 2013
@@ -32,7 +32,7 @@ class JarJobIDParser extends JobIDParser
 
   @Override
   List<String> parseJobID() throws IOException {
-    return parseJobID(TempletonControllerJob.STDERR_FNAME, jobidPattern);
+    return parseJobID(JobSubmissionConstants.STDERR_FNAME, jobidPattern);
   }
 
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/PigJobIDParser.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/PigJobIDParser.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/PigJobIDParser.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/PigJobIDParser.java Tue Oct 29 17:25:55 2013
@@ -32,6 +32,6 @@ class PigJobIDParser extends JobIDParser
 
   @Override
   List<String> parseJobID() throws IOException {
-    return parseJobID(TempletonControllerJob.STDERR_FNAME, jobidPattern);
+    return parseJobID(JobSubmissionConstants.STDERR_FNAME, jobidPattern);
   }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java Tue Oct 29 17:25:55 2013
@@ -18,23 +18,10 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
-import java.io.BufferedReader;
+import java.io.File;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
+import java.net.URI;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -42,24 +29,24 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Tool;
-import org.apache.hive.hcatalog.templeton.BadParam;
-import org.apache.hive.hcatalog.templeton.LauncherDelegator;
+import org.apache.hive.hcatalog.templeton.AppConfig;
+import org.apache.hive.hcatalog.templeton.Main;
 import org.apache.hive.hcatalog.templeton.SecureProxySupport;
 import org.apache.hive.hcatalog.templeton.UgiFactory;
 import org.apache.thrift.TException;
@@ -83,281 +70,99 @@ import org.apache.thrift.TException;
  * parameter supplied in the REST call.  WebHcat takes care of cancelling the token when the job
  * is complete.
  */
-public class TempletonControllerJob extends Configured implements Tool {
-  public static final String COPY_NAME = "templeton.copy";
-  public static final String STATUSDIR_NAME = "templeton.statusdir";
-  public static final String ENABLE_LOG = "templeton.enablelog";
-  public static final String JOB_TYPE = "templeton.jobtype";
-  public static final String JAR_ARGS_NAME = "templeton.args";
-  public static final String OVERRIDE_CLASSPATH = "templeton.override-classpath";
-
-  public static final String STDOUT_FNAME = "stdout";
-  public static final String STDERR_FNAME = "stderr";
-  public static final String EXIT_FNAME = "exit";
-
-  public static final int WATCHER_TIMEOUT_SECS = 10;
-  public static final int KEEP_ALIVE_MSEC = 60 * 1000;
-
-  public static final String TOKEN_FILE_ARG_PLACEHOLDER 
-    = "__WEBHCAT_TOKEN_FILE_LOCATION__";
-
-  private static TrivialExecService execService = TrivialExecService.getInstance();
-
+@InterfaceAudience.Private
+public class TempletonControllerJob extends Configured implements Tool, JobSubmissionConstants {
   private static final Log LOG = LogFactory.getLog(TempletonControllerJob.class);
-  private final boolean secureMetastoreAccess;
+  //file to add to DistributedCache
+  private static URI overrideLog4jURI = null;
+  private static boolean overrideContainerLog4jProps;
+  //Jar cmd submission likely will be affected, Pig likely not
+  private static final String affectedMsg = "Monitoring of Hadoop jobs submitted through WebHCat " +
+          "may be affected.";
+  private static final String TMP_DIR_PROP = "hadoop.tmp.dir";
 
   /**
-   * @param secureMetastoreAccess - if true, a delegation token will be created
-   *                              and added to the job
+   * Copy the file from local file system to tmp dir
    */
-  public TempletonControllerJob(boolean secureMetastoreAccess) {
-    super();
-    this.secureMetastoreAccess = secureMetastoreAccess;
-  }
-  public static class LaunchMapper
-    extends Mapper<NullWritable, NullWritable, Text, Text> {
-    protected Process startJob(Context context, String user,
-                   String overrideClasspath)
-      throws IOException, InterruptedException {
-      Configuration conf = context.getConfiguration();
-      copyLocal(COPY_NAME, conf);
-      String[] jarArgs
-        = TempletonUtils.decodeArray(conf.get(JAR_ARGS_NAME));
-
-      ArrayList<String> removeEnv = new ArrayList<String>();
-      removeEnv.add("HADOOP_ROOT_LOGGER");
-      removeEnv.add("hadoop-command");
-      removeEnv.add("CLASS");
-      removeEnv.add("mapredcommand");
-      Map<String, String> env = TempletonUtils.hadoopUserEnv(user,
-        overrideClasspath);
-      List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
-      String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
-
-
-      if (tokenFile != null) {
-        //Token is available, so replace the placeholder
-        tokenFile = tokenFile.replaceAll("\"", "");
-        String tokenArg = "mapreduce.job.credentials.binary=" + tokenFile;
-        if (Shell.WINDOWS) {
-          try {
-            tokenArg = TempletonUtils.quoteForWindows(tokenArg);
-          } catch (BadParam e) {
-            throw new IOException("cannot pass " + tokenFile + " to mapreduce.job.credentials.binary", e);
-          }
-        }
-        for(int i=0; i<jarArgsList.size(); i++){
-          String newArg = 
-            jarArgsList.get(i).replace(TOKEN_FILE_ARG_PLACEHOLDER, tokenArg);
-          jarArgsList.set(i, newArg);
-        }
-
-      }else{
-        //No token, so remove the placeholder arg
-        Iterator<String> it = jarArgsList.iterator();
-        while(it.hasNext()){
-          String arg = it.next();
-          if(arg.contains(TOKEN_FILE_ARG_PLACEHOLDER)){
-            it.remove();
-          }
-        }
-      }
-      return execService.run(jarArgsList, removeEnv, env);
-    }
-
-    private void copyLocal(String var, Configuration conf)
-      throws IOException {
-      String[] filenames = TempletonUtils.decodeArray(conf.get(var));
-      if (filenames != null) {
-        for (String filename : filenames) {
-          Path src = new Path(filename);
-          Path dst = new Path(src.getName());
-          FileSystem fs = src.getFileSystem(conf);
-          System.err.println("templeton: copy " + src + " => " + dst);
-          fs.copyToLocalFile(src, dst);
+  private static URI copyLog4JtoFileSystem(final String localFile) throws IOException,
+          InterruptedException {
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    return ugi.doAs(new PrivilegedExceptionAction<URI>() {
+      @Override
+      public URI run() throws IOException {
+        AppConfig appConfig = Main.getAppConfigInstance();
+        String fsTmpDir = appConfig.get(TMP_DIR_PROP);
+        if(fsTmpDir == null || fsTmpDir.length() <= 0) {
+          LOG.warn("Could not find 'hadoop.tmp.dir'; " + affectedMsg);
+          return null;
         }
-      }
-    }
-
-    @Override
-    public void run(Context context)
-      throws IOException, InterruptedException {
-
-      Configuration conf = context.getConfiguration();
-
-      Process proc = startJob(context,
-        conf.get("user.name"),
-        conf.get(OVERRIDE_CLASSPATH));
-
-      String statusdir = conf.get(STATUSDIR_NAME);
-
-      if (statusdir != null) {
-        try {
-          statusdir = TempletonUtils.addUserHomeDirectoryIfApplicable(statusdir,
-            conf.get("user.name"));
-        } catch (URISyntaxException e) {
-          throw new IOException("Invalid status dir URI", e);
+        FileSystem fs = FileSystem.get(appConfig);
+        Path dirPath = new Path(fsTmpDir);
+        if(!fs.exists(dirPath)) {
+          LOG.warn(dirPath + " does not exist; " + affectedMsg);
+          return null;
         }
+        Path dst = fs.makeQualified(new Path(fsTmpDir, CONTAINER_LOG4J_PROPS));
+        fs.copyFromLocalFile(new Path(localFile), dst);
+        //make readable by all users since TempletonControllerJob#run() is run as submitting user
+        fs.setPermission(dst, new FsPermission((short)0644));
+        return dst.toUri();
       }
-
-      Boolean enablelog = Boolean.parseBoolean(conf.get(ENABLE_LOG));
-      LauncherDelegator.JobType jobType = LauncherDelegator.JobType.valueOf(conf.get(JOB_TYPE));
-
-      ExecutorService pool = Executors.newCachedThreadPool();
-      executeWatcher(pool, conf, context.getJobID(),
-        proc.getInputStream(), statusdir, STDOUT_FNAME);
-      executeWatcher(pool, conf, context.getJobID(),
-        proc.getErrorStream(), statusdir, STDERR_FNAME);
-      KeepAlive keepAlive = startCounterKeepAlive(pool, context);
-
-      proc.waitFor();
-      keepAlive.sendReport = false;
-      pool.shutdown();
-      if (!pool.awaitTermination(WATCHER_TIMEOUT_SECS, TimeUnit.SECONDS)) {
-        pool.shutdownNow();
-      }
-
-      writeExitValue(conf, proc.exitValue(), statusdir);
-      JobState state = new JobState(context.getJobID().toString(), conf);
-      state.setExitValue(proc.exitValue());
-      state.setCompleteStatus("done");
-      state.close();
-
-      if (enablelog && TempletonUtils.isset(statusdir)) {
-        System.err.println("templeton: collecting logs for " + context.getJobID().toString()
-          + " to " + statusdir + "/logs");
-        LogRetriever logRetriever = new LogRetriever(statusdir, jobType, conf);
-        logRetriever.run();
-      }
-
-      if (proc.exitValue() != 0) {
-        System.err.println("templeton: job failed with exit code "
-          + proc.exitValue());
-      }
-      else {
-        System.err.println("templeton: job completed with exit code 0");
-      }
-    }
-
-    private void executeWatcher(ExecutorService pool, Configuration conf,
-                  JobID jobid, InputStream in, String statusdir,
-                  String name)
-      throws IOException {
-      Watcher w = new Watcher(conf, jobid, in, statusdir, name);
-      pool.execute(w);
-    }
-
-    private KeepAlive startCounterKeepAlive(ExecutorService pool, Context context)
-      throws IOException {
-      KeepAlive k = new KeepAlive(context);
-      pool.execute(k);
-      return k;
-    }
-
-    private void writeExitValue(Configuration conf, int exitValue, String statusdir)
-      throws IOException {
-      if (TempletonUtils.isset(statusdir)) {
-        Path p = new Path(statusdir, EXIT_FNAME);
-        FileSystem fs = p.getFileSystem(conf);
-        OutputStream out = fs.create(p);
-        System.err.println("templeton: Writing exit value "
-          + exitValue + " to " + p);
-        PrintWriter writer = new PrintWriter(out);
-        writer.println(exitValue);
-        writer.close();
-      }
-    }
+    });
   }
-
-  private static class Watcher implements Runnable {
-    private final InputStream in;
-    private OutputStream out;
-    private final JobID jobid;
-    private final Configuration conf;
-
-    public Watcher(Configuration conf, JobID jobid, InputStream in,
-             String statusdir, String name)
-      throws IOException {
-      this.conf = conf;
-      this.jobid = jobid;
-      this.in = in;
-
-      if (name.equals(STDERR_FNAME))
-        out = System.err;
-      else
-        out = System.out;
-
-      if (TempletonUtils.isset(statusdir)) {
-        Path p = new Path(statusdir, name);
-        FileSystem fs = p.getFileSystem(conf);
-        out = fs.create(p);
-        System.err.println("templeton: Writing status to " + p);
-      }
-    }
-
-    @Override
-    public void run() {
-      try {
-        InputStreamReader isr = new InputStreamReader(in);
-        BufferedReader reader = new BufferedReader(isr);
-        PrintWriter writer = new PrintWriter(out);
-
-        String line;
-        while ((line = reader.readLine()) != null) {
-          writer.println(line);
-          JobState state = null;
+  /**
+   * Path of the log4j properties file on the local file system.
+   * @return absolute path to CONTAINER_LOG4J_PROPS under the WebHCat conf dir
+   */
+  private static String getLog4JPropsLocal() {
+    return AppConfig.getWebhcatConfDir() + File.separator + CONTAINER_LOG4J_PROPS;
+  }
+  static {
+    //initialize once-per-JVM (i.e. one running WebHCat server) state and log it once since it's 
+    // the same for every job
+    try {
+      //safe (thread) publication 
+      // http://docs.oracle.com/javase/specs/jls/se5.0/html/execution.html#12.4.2
+      LOG.info("Using Hadoop Version: " + ShimLoader.getMajorVersion());
+      overrideContainerLog4jProps = "0.23".equals(ShimLoader.getMajorVersion());
+      if(overrideContainerLog4jProps) {
+        //see detailed note in CONTAINER_LOG4J_PROPS file
+        LOG.info(AppConfig.WEBHCAT_CONF_DIR + "=" + AppConfig.getWebhcatConfDir());
+        File localFile = new File(getLog4JPropsLocal());
+        if(localFile.exists()) {
+          LOG.info("Found " + localFile.getAbsolutePath() + " to use for job submission.");
           try {
-            String percent = TempletonUtils.extractPercentComplete(line);
-            String childid = TempletonUtils.extractChildJobId(line);
-
-            if (percent != null || childid != null) {
-              state = new JobState(jobid.toString(), conf);
-              state.setPercentComplete(percent);
-              state.setChildId(childid);
-            }
-          } catch (IOException e) {
-            System.err.println("templeton: state error: " + e);
-          } finally {
-            if (state != null) {
-              try {
-                state.close();
-              } catch (IOException e) {
-              }
-            }
+            overrideLog4jURI = copyLog4JtoFileSystem(getLog4JPropsLocal());
+            LOG.info("Job submission will use log4j.properties=" + overrideLog4jURI);
+          }
+          catch(IOException ex) {
+            LOG.warn("Will not add " + CONTAINER_LOG4J_PROPS + " to Distributed Cache.  " +
+                    "Some fields in job status may be unavailable", ex);
           }
         }
-        writer.flush();
-      } catch (IOException e) {
-        System.err.println("templeton: execute error: " + e);
+        else {
+          LOG.warn("Could not find " + localFile.getAbsolutePath() + ". " + affectedMsg);
+        }
       }
     }
+    catch(Throwable t) {
+      //this intentionally doesn't use TempletonControllerJob.class.getName() to be able to
+      //log errors which may be due to class loading
+      String msg = "org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob is not " +
+              "properly initialized. " + affectedMsg;
+      LOG.error(msg, t);
+    }
   }
 
-  public static class KeepAlive implements Runnable {
-    private Context context;
-    public boolean sendReport;
-
-    public KeepAlive(Context context)
-    {
-      this.sendReport = true;
-      this.context = context;
-    }
+  private final boolean secureMetastoreAccess;
 
-    @Override
-    public void run() {
-      try {
-        while (sendReport) {
-          // Periodically report progress on the Context object
-          // to prevent TaskTracker from killing the Templeton
-          // Controller task
-          context.progress();
-          System.err.println("KeepAlive Heart beat");
-          Thread.sleep(KEEP_ALIVE_MSEC);
-        }
-      } catch (InterruptedException e) {
-        // Ok to be interrupted
-      }
-    }
+  /**
+   * @param secureMetastoreAccess - if true, a delegation token will be created
+   *                              and added to the job
+   */
+  public TempletonControllerJob(boolean secureMetastoreAccess) {
+    super();
+    this.secureMetastoreAccess = secureMetastoreAccess;
   }
 
   private JobID submittedJobId;
@@ -365,8 +170,7 @@ public class TempletonControllerJob exte
   public String getSubmittedId() {
     if (submittedJobId == null) {
       return null;
-    }
-    else {
+    } else {
       return submittedJobId.toString();
     }
   }
@@ -376,20 +180,39 @@ public class TempletonControllerJob exte
    * @see org.apache.hive.hcatalog.templeton.CompleteDelegator
    */
   @Override
-  public int run(String[] args)
-    throws IOException, InterruptedException, ClassNotFoundException, TException {
+  public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException, 
+          TException {
     Configuration conf = getConf();
-    
+
     conf.set(JAR_ARGS_NAME, TempletonUtils.encodeArray(args));
     String user = UserGroupInformation.getCurrentUser().getShortUserName();
     conf.set("user.name", user);
+    if(overrideContainerLog4jProps && overrideLog4jURI != null) {
+      //must be done before Job object is created
+      conf.set(OVERRIDE_CONTAINER_LOG4J_PROPS, Boolean.TRUE.toString());
+    }
     Job job = new Job(conf);
-    job.setJarByClass(TempletonControllerJob.class);
-    job.setJobName("TempletonControllerJob");
+    job.setJarByClass(LaunchMapper.class);
+    job.setJobName(TempletonControllerJob.class.getSimpleName());
     job.setMapperClass(LaunchMapper.class);
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(Text.class);
     job.setInputFormatClass(SingleInputFormat.class);
+    if(overrideContainerLog4jProps && overrideLog4jURI != null) {
+      FileSystem fs = FileSystem.get(conf);
+      if(fs.exists(new Path(overrideLog4jURI))) {
+        ShimLoader.getHadoopShims().getWebHCatShim(conf, UgiFactory.getUgi(user)).addCacheFile(
+                overrideLog4jURI, job);
+        LOG.debug("added " + overrideLog4jURI + " to Dist Cache");
+      }
+      else {
+        //in case this file was deleted by someone, issue a warning but don't try to add it to
+        // the DistributedCache, as that would throw and fail job submission
+        LOG.warn("Cannot find " + overrideLog4jURI + " which is created on WebHCat startup/job " +
+                "submission.  " + affectedMsg);
+      }
+    }
+
     NullOutputFormat<NullWritable, NullWritable> of = new NullOutputFormat<NullWritable, NullWritable>();
     job.setOutputFormatClass(of.getClass());
     job.setNumReduceTasks(0);
@@ -404,13 +227,16 @@ public class TempletonControllerJob exte
     job.submit();
 
     submittedJobId = job.getJobID();
-
     if(metastoreTokenStrForm != null) {
       //so that it can be cancelled later from CompleteDelegator
       DelegationTokenCache.getStringFormTokenCache().storeDelegationToken(
               submittedJobId.toString(), metastoreTokenStrForm);
-      LOG.debug("Added metastore delegation token for jobId=" + submittedJobId.toString() + " " +
-              "user=" + user);
+      LOG.debug("Added metastore delegation token for jobId=" + submittedJobId.toString() +
+              " user=" + user);
+    }
+    if(overrideContainerLog4jProps && overrideLog4jURI == null) {
+      //do this here so that log msg has JobID
+      LOG.warn("Could not override container log4j properties for " + submittedJobId);
     }
     return 0;
   }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java Tue Oct 29 17:25:55 2013
@@ -85,10 +85,11 @@ public class TempletonUtils {
     return (col != null) && (!col.isEmpty());
   }
 
-
-  public static final Pattern JAR_COMPLETE
-    = Pattern.compile(" map \\d+%\\s+reduce \\d+%$");
+  //looking for map 100% reduce 100%
+  public static final Pattern JAR_COMPLETE = Pattern.compile(" map \\d+%\\s+reduce \\d+%$");
   public static final Pattern PIG_COMPLETE = Pattern.compile(" \\d+% complete$");
+  //looking for map = 100%,  reduce = 100%
+  public static final Pattern HIVE_COMPLETE = Pattern.compile(" map = \\d+%,\\s+reduce = \\d+%$");
 
   /**
    * Extract the percent complete line from Pig or Jar jobs.
@@ -101,7 +102,19 @@ public class TempletonUtils {
     Matcher pig = PIG_COMPLETE.matcher(line);
     if (pig.find())
       return pig.group().trim();
-
+    
+    Matcher hive = HIVE_COMPLETE.matcher(line);
+    if(hive.find()) {
+      StringBuilder sb = new StringBuilder(hive.group().trim());
+      String[] toRemove = {"= ", ", "};
+      for(String pattern : toRemove) {
+        int pos;
+        while((pos = sb.indexOf(pattern)) >= 0) {
+          sb.delete(pos, pos + pattern.length());
+        }
+      }
+      return sb.toString();//normalized to look like JAR_COMPLETE
+    }
     return null;
   }
 

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java Tue Oct 29 17:25:55 2013
@@ -18,21 +18,30 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 /**
  * Execute a local program.  This is a singleton service that will
  * execute a programs on the local box.
+ * 
+ * Note that it is executed from LaunchMapper, which runs in a
+ * different JVM from the WebHCat (Templeton) server.  Thus it should not call any classes
+ * not available on every node in the cluster (outside the webhcat jar).
  */
-public class TrivialExecService {
-  private static volatile TrivialExecService theSingleton;
+final class TrivialExecService {
+  //with default log4j config, this output ends up in 'syslog' of the LaunchMapper task
   private static final Log LOG = LogFactory.getLog(TrivialExecService.class);
-
+  private static volatile TrivialExecService theSingleton;
+  private static final String HADOOP_CLIENT_OPTS = "HADOOP_CLIENT_OPTS";
   /**
    * Retrieve the singleton.
    */
@@ -41,32 +50,53 @@ public class TrivialExecService {
       theSingleton = new TrivialExecService();
     return theSingleton;
   }
-
+  /**
+   * See {@link JobSubmissionConstants#CONTAINER_LOG4J_PROPS} file for details.
+   */
+  private static void hadoop2LogRedirect(ProcessBuilder processBuilder) {
+    Map<String, String> env = processBuilder.environment();
+    if(!env.containsKey(HADOOP_CLIENT_OPTS)) {
+      return;
+    }
+    String hcopts = env.get(HADOOP_CLIENT_OPTS);
+    if(!hcopts.contains("log4j.configuration=container-log4j.properties")) {
+      return;
+    }
+    //TempletonControllerJob ensures that this file is in DistributedCache
+    File log4jProps = new File(JobSubmissionConstants.CONTAINER_LOG4J_PROPS);
+    hcopts = hcopts.replace("log4j.configuration=container-log4j.properties",
+            "log4j.configuration=file://" + log4jProps.getAbsolutePath());
+    //helps figure out what log4j is doing, but may confuse 
+    //some jobs due to extra output to stdout
+    //hcopts = hcopts + " -Dlog4j.debug=true";
+    env.put(HADOOP_CLIENT_OPTS, hcopts);
+  }
   public Process run(List<String> cmd, List<String> removeEnv,
-             Map<String, String> environmentVariables)
+             Map<String, String> environmentVariables, boolean overrideContainerLog4jProps)
     throws IOException {
-    logDebugCmd(cmd, environmentVariables);
+    LOG.info("run(cmd, removeEnv, environmentVariables, " + overrideContainerLog4jProps + ")");
+    LOG.info("Starting cmd: " + cmd);
     ProcessBuilder pb = new ProcessBuilder(cmd);
-    for (String key : removeEnv)
+    for (String key : removeEnv) {
+      if(pb.environment().containsKey(key)) {
+        LOG.info("Removing env var: " + key + "=" + pb.environment().get(key));
+      }
       pb.environment().remove(key);
+    }
     pb.environment().putAll(environmentVariables);
+    if(overrideContainerLog4jProps) {
+      hadoop2LogRedirect(pb);
+    }
+    logDebugInfo("Starting process with env:", pb.environment());
     return pb.start();
   }
-
-  private void logDebugCmd(List<String> cmd,
-    Map<String, String> environmentVariables) {
-    if(!LOG.isDebugEnabled()){
-      return;
-    }
-    LOG.debug("starting " + cmd);
-    LOG.debug("With environment variables: " );
-    for(Map.Entry<String, String> keyVal : environmentVariables.entrySet()){
-      LOG.debug(keyVal.getKey() + "=" + keyVal.getValue());
-    }
-    LOG.debug("With environment variables already set: " );
-    Map<String, String> env = System.getenv();
-    for (String envName : env.keySet()) {
-      LOG.debug(envName + "=" + env.get(envName));
-    }
+  private static void logDebugInfo(String msg, Map<String, String> props) {
+    LOG.info(msg);
+    List<String> keys = new ArrayList<String>();
+    keys.addAll(props.keySet());
+    Collections.sort(keys);
+    for(String key : keys) {
+      LOG.info(key + "=" + props.get(key));
+    }    
   }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTrivialExecService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTrivialExecService.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTrivialExecService.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTrivialExecService.java Tue Oct 29 17:25:55 2013
@@ -38,7 +38,7 @@ public class TestTrivialExecService {
       Process process = TrivialExecService.getInstance()
         .run(list,
           new ArrayList<String>(),
-          new HashMap<String, String>());
+          new HashMap<String, String>(),false);
       out = new BufferedReader(new InputStreamReader(
         process.getInputStream()));
       err = new BufferedReader(new InputStreamReader(

Modified: hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=1536823&r1=1536822&r2=1536823&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Tue Oct 29 17:25:55 2013
@@ -114,7 +114,7 @@ public class HiveResultSetMetaData imple
       return serdeConstants.DATE_TYPE_NAME;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
-    } else if ("decimal".equalsIgnoreCase(type)) {
+    } else if (type.startsWith("decimal")) {
       return serdeConstants.DECIMAL_TYPE_NAME;
     } else if (type.startsWith("map<")) {
       return serdeConstants.STRING_TYPE_NAME;