Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 05:44:26 UTC

svn commit: r1629562 [1/38] - in /hive/branches/spark: ./ accumulo-handler/ beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/common/type/ contrib/src...

Author: brock
Date: Mon Oct  6 03:44:13 2014
New Revision: 1629562

URL: http://svn.apache.org/r1629562
Log:
Revert HIVE-8303: Merge from trunk to spark 9/29/14

Removed:
    hive/branches/spark/data/files/avro_charvarchar.txt
    hive/branches/spark/data/files/cbo_t1.txt
    hive/branches/spark/data/files/cbo_t2.txt
    hive/branches/spark/data/files/cbo_t3.txt
    hive/branches/spark/data/files/cbo_t4.txt
    hive/branches/spark/data/files/cbo_t5.txt
    hive/branches/spark/data/files/cbo_t6.txt
    hive/branches/spark/data/files/location.txt
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseStructValue.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/StructHBaseValueFactory.java
    hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestStructSerializer.java
    hive/branches/spark/hbase-handler/src/test/queries/positive/hbase_ppd_join.q
    hive/branches/spark/hbase-handler/src/test/results/positive/hbase_ppd_join.q.out
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/ClosedOrCancelledStatementException.java
    hive/branches/spark/metastore/scripts/upgrade/mssql/003-HIVE-8239.mssql.sql
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/TezDummyStoreOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomVertexConfiguration.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordSource.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValuesInputMerger.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MergeJoinProc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java
    hive/branches/spark/ql/src/test/queries/clientnegative/alter_partition_change_col_dup_col.q
    hive/branches/spark/ql/src/test/queries/clientnegative/alter_partition_change_col_nonexist.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_not_owner_drop_tab2.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_columns.q
    hive/branches/spark/ql/src/test/queries/clientnegative/delete_non_acid_table.q
    hive/branches/spark/ql/src/test/queries/clientnegative/update_non_acid_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_partition_change_col.q
    hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_groupby2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_grant_option_role.q
    hive/branches/spark/ql/src/test/queries/clientpositive/avro_charvarchar.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_correctness.q
    hive/branches/spark/ql/src/test/queries/clientpositive/constantPropagateForSubQuery.q
    hive/branches/spark/ql/src/test/queries/clientpositive/decimal_udf2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/drop_table_purge.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_merge_multi_expressions.q
    hive/branches/spark/ql/src/test/queries/clientpositive/tez_smb_1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/tez_smb_main.q
    hive/branches/spark/ql/src/test/queries/clientpositive/tez_union_group_by.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorized_dynamic_partition_pruning.q
    hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab2.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/authorization_show_columns.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/delete_non_acid_table.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/update_non_acid_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_change_col.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_grant_option_role.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/avro_charvarchar.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_correctness.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_udf2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/drop_table_purge.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_merge_multi_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_correctness.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_smb_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_smb_main.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union_group_by.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
Modified:
    hive/branches/spark/accumulo-handler/pom.xml
    hive/branches/spark/beeline/pom.xml
    hive/branches/spark/beeline/src/java/org/apache/hive/beeline/BeeLine.java
    hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java
    hive/branches/spark/bin/ext/beeline.sh
    hive/branches/spark/common/pom.xml
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
    hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
    hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_add.q.out
    hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_format.q.out
    hive/branches/spark/data/files/parquet_types.txt
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
    hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseValueFactory.java
    hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
    hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
    hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
    hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
    hive/branches/spark/jdbc/pom.xml
    hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
    hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/TSetIpAddressProcessor.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
    hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/IpAddressListener.java
    hive/branches/spark/pom.xml
    hive/branches/spark/ql/if/queryplan.thrift
    hive/branches/spark/ql/pom.xml
    hive/branches/spark/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
    hive/branches/spark/ql/src/gen/thrift/gen-cpp/queryplan_types.h
    hive/branches/spark/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
    hive/branches/spark/ql/src/gen/thrift/gen-php/Types.php
    hive/branches/spark/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
    hive/branches/spark/ql/src/gen/thrift/gen-rb/queryplan_types.rb
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/AppMasterEventOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileTezProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/TezMergedLogicalInput.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerWriterV2.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SerializationUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ArrayWritableGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/metainfo/annotation/OpTraitsRulesProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezProcContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OpTraits.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzPluginException.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzSessionContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeInfo.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestSerializationUtils.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/plan/TestTezWork.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMath.java
    hive/branches/spark/ql/src/test/queries/clientnegative/acid_overwrite.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
    hive/branches/spark/ql/src/test/queries/clientnegative/update_no_such_table.q
    hive/branches/spark/ql/src/test/queries/clientnegative/update_partition_col.q
    hive/branches/spark/ql/src/test/queries/clientpositive/acid_vectorization.q
    hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_groupby.q
    hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_part.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_delete.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_delete_own_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_update.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_update_own_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/create_func1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/decimal_udf.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_all_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_all_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_orig_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_tmp_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_where_no_match.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_where_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_where_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_whole_partition.q
    hive/branches/spark/ql/src/test/queries/clientpositive/dynamic_partition_pruning_2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q
    hive/branches/spark/ql/src/test/queries/clientpositive/dynpart_sort_optimization.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_orig_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_update_delete.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_dynamic_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_orig_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_tmp_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/parquet_types.q
    hive/branches/spark/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/quote2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_after_multiple_inserts.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_all_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_all_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_all_types.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_orig_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_tmp_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_two_cols.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_where_no_match.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_where_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_where_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_char_simple.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_varchar_simple.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorization_0.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorized_date_funcs.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorized_timestamp_funcs.q
    hive/branches/spark/ql/src/test/queries/positive/udf6.q
    hive/branches/spark/ql/src/test/resources/orc-file-dump-dictionary-threshold.out
    hive/branches/spark/ql/src/test/resources/orc-file-dump.out
    hive/branches/spark/ql/src/test/results/clientnegative/limit_partition_stats.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_local_resource.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_merge_stats_orc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_select.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_explain.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/binarysortable_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/column_access_stats.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/combine2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/create_func1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dynpart_sort_opt_vectorization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dynpart_sort_optimization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/explain_dependency.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/explain_logical.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/filter_numeric.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_cube1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_rollup1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_sort_11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/groupby_sort_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input24.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/insert_values_tmp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/keyword_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/limit0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/limit_partition_metadataonly.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/limit_pushdown.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/metadata_only_queries.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/metadata_only_queries_with_filters.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/metadataonly1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nonmr_fetch.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nullgroup3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_analyze.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_create.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_create.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_constant_where.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppr_pushdown3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/query_properties.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/quote1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/quote2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/regex_col.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/schemeAuthority.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/schemeAuthority2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/select_dummy_source.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/serde_user_properties.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/alter_merge_stats_orc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/bucket2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/bucket3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/bucket4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/column_access_stats.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/count.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/ctas.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/escape_clusterby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/escape_distributeby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/escape_orderby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/escape_sortby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map_multi_distinct.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_noskew.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_noskew_multi_distinct.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_position.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_rollup1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/innerjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input1_limit.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/input_part2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/join_nullsafe.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/mapreduce1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/mapreduce2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/metadata_only_queries.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/metadata_only_queries_with_filters.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_move_tasks_share_dependencies.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/multigroupby_singlemr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/order.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/order2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_multi_insert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_transform.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sample9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/sort.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union28.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union30.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union33.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_24.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorization_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats_only_null.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_alias.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin_having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/alter_merge_stats_orc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_join0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_optimization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/insert_values_tmp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/join0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/limit_pushdown.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/metadata_only_queries.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/mrr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge_incompat1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge_incompat2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_join_tests.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_joins_explain.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/union7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/update_all_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_char_simple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_varchar_simple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_case.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_current_database.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_elt.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_explode.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_if.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_reflect2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_using.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_when.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_explode.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_24.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/update_all_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_char_simple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_elt.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_varchar_simple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorization_0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorization_limit.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
    hive/branches/spark/ql/src/test/results/compiler/parse/udf6.q.out
    hive/branches/spark/ql/src/test/results/compiler/plan/cast1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input7.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input9.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/union.q.xml
    hive/branches/spark/serde/pom.xml
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
    hive/branches/spark/serde/src/test/resources/avro-struct.avsc
    hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
    hive/branches/spark/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
    hive/branches/spark/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
    hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
    hive/branches/spark/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java

Modified: hive/branches/spark/accumulo-handler/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/pom.xml?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/accumulo-handler/pom.xml (original)
+++ hive/branches/spark/accumulo-handler/pom.xml Mon Oct  6 03:44:13 2014
@@ -112,12 +112,6 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-core</artifactId>
           <version>${hadoop-20S.version}</version>
          <optional>true</optional>
@@ -129,12 +123,6 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
           <version>${hadoop-23.version}</version>
           <optional>true</optional>

Modified: hive/branches/spark/beeline/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/beeline/pom.xml?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/beeline/pom.xml (original)
+++ hive/branches/spark/beeline/pom.xml Mon Oct  6 03:44:13 2014
@@ -49,11 +49,6 @@
       <artifactId>hive-shims</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-jdbc</artifactId>
-      <version>${project.version}</version>
-    </dependency>
     <!-- inter-project -->
     <dependency>
       <groupId>commons-cli</groupId>
@@ -93,6 +88,12 @@
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
       <classifier>tests</classifier>

Modified: hive/branches/spark/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/spark/beeline/src/java/org/apache/hive/beeline/BeeLine.java Mon Oct  6 03:44:13 2014
@@ -692,6 +692,10 @@ public class BeeLine implements Closeabl
 
     int code = 0;
     if (!commands.isEmpty()) {
+      // for single command execute, disable color
+      getOpts().setColor(false);
+      getOpts().setHeaderInterval(-1);
+
       for (Iterator<String> i = commands.iterator(); i.hasNext();) {
         String command = i.next().toString();
         debug(loc("executing-command", command));

Modified: hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java (original)
+++ hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java Mon Oct  6 03:44:13 2014
@@ -38,7 +38,6 @@ import java.sql.Driver;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.sql.SQLWarning;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.LinkedList;
@@ -48,13 +47,10 @@ import java.util.Set;
 import java.util.TreeSet;
 
 import org.apache.hadoop.hive.common.cli.ShellCmdExecutor;
-import org.apache.hive.jdbc.HiveStatement;
 
 
 public class Commands {
   private final BeeLine beeLine;
-  private static final int DEFAULT_QUERY_PROGRESS_INTERVAL = 1000;
-  private static final int DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT = 10 * 1000;
 
   /**
    * @param beeLine
@@ -732,7 +728,7 @@ public class Commands {
       beeLine.handleException(e);
     }
 
-    line = line.trim();
+
     if (line.endsWith(";")) {
       line = line.substring(0, line.length() - 1);
     }
@@ -762,7 +758,6 @@ public class Commands {
     try {
       Statement stmnt = null;
       boolean hasResults;
-      Thread logThread = null;
 
       try {
         long start = System.currentTimeMillis();
@@ -772,15 +767,7 @@ public class Commands {
           hasResults = ((CallableStatement) stmnt).execute();
         } else {
           stmnt = beeLine.createStatement();
-          if (beeLine.getOpts().isSilent()) {
-            hasResults = stmnt.execute(sql);
-          } else {
-            logThread = new Thread(createLogRunnable(stmnt));
-            logThread.setDaemon(true);
-            logThread.start();
-            hasResults = stmnt.execute(sql);
-            logThread.interrupt();
-          }
+          hasResults = stmnt.execute(sql);
         }
 
         beeLine.showWarnings();
@@ -795,11 +782,6 @@ public class Commands {
               beeLine.info(beeLine.loc("rows-selected", count) + " "
                   + beeLine.locElapsedTime(end - start));
             } finally {
-              if (logThread != null) {
-                logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT);
-                showRemainingLogsIfAny(stmnt);
-                logThread = null;
-              }
               rs.close();
             }
           } while (BeeLine.getMoreResults(stmnt));
@@ -810,13 +792,6 @@ public class Commands {
               + " " + beeLine.locElapsedTime(end - start));
         }
       } finally {
-        if (logThread != null) {
-          if (!logThread.isInterrupted()) {
-            logThread.interrupt();
-          }
-          logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT);
-          showRemainingLogsIfAny(stmnt);
-        }
         if (stmnt != null) {
           stmnt.close();
         }
@@ -828,61 +803,6 @@ public class Commands {
     return true;
   }
 
-  private Runnable createLogRunnable(Statement statement) {
-    if (statement instanceof HiveStatement) {
-      final HiveStatement hiveStatement = (HiveStatement) statement;
-
-      Runnable runnable = new Runnable() {
-        @Override
-        public void run() {
-          while (hiveStatement.hasMoreLogs()) {
-            try {
-              // fetch the log periodically and output to beeline console
-              for (String log : hiveStatement.getQueryLog()) {
-                beeLine.info(log);
-              }
-              Thread.sleep(DEFAULT_QUERY_PROGRESS_INTERVAL);
-            } catch (SQLException e) {
-              beeLine.error(new SQLWarning(e));
-              return;
-            } catch (InterruptedException e) {
-              beeLine.debug("Getting log thread is interrupted, since query is done!");
-              return;
-            }
-          }
-        }
-      };
-      return runnable;
-    } else {
-      beeLine.debug("The statement instance is not HiveStatement type: " + statement.getClass());
-      return new Runnable() {
-        @Override
-        public void run() {
-          // do nothing.
-        }
-      };
-    }
-  }
-
-  private void showRemainingLogsIfAny(Statement statement) {
-    if (statement instanceof HiveStatement) {
-      HiveStatement hiveStatement = (HiveStatement) statement;
-      List<String> logs;
-      do {
-        try {
-          logs = hiveStatement.getQueryLog();
-        } catch (SQLException e) {
-          beeLine.error(new SQLWarning(e));
-          return;
-        }
-        for (String log : logs) {
-          beeLine.info(log);
-        }
-      } while (logs.size() > 0);
-    } else {
-      beeLine.debug("The statement instance is not HiveStatement type: " + statement.getClass());
-    }
-  }
 
   public boolean quit(String line) {
     beeLine.setExit(true);

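For context on this revert, a minimal sketch of the background log-polling pattern implemented by the Commands.java code removed above: a daemon thread drains HiveStatement.getQueryLog() while the query runs. Only hasMoreLogs() and getQueryLog() come from the diff; the wrapper class, method names, and System.err output here are illustrative only.

    import java.sql.SQLException;
    import java.sql.Statement;
    import org.apache.hive.jdbc.HiveStatement;

    // Illustrative sketch only: polls query logs on a daemon thread while the
    // statement executes, mirroring the createLogRunnable() code removed above.
    public class QueryLogPoller {
      private static final int POLL_INTERVAL_MS = 1000;

      public static Thread startPolling(final Statement stmt) {
        if (!(stmt instanceof HiveStatement)) {
          return null;                     // nothing to poll for non-Hive statements
        }
        final HiveStatement hiveStmt = (HiveStatement) stmt;
        Thread poller = new Thread(new Runnable() {
          @Override
          public void run() {
            while (hiveStmt.hasMoreLogs()) {
              try {
                for (String log : hiveStmt.getQueryLog()) {
                  System.err.println(log); // BeeLine routed this through beeLine.info()
                }
                Thread.sleep(POLL_INTERVAL_MS);
              } catch (SQLException e) {
                return;                    // stop polling on driver errors
              } catch (InterruptedException e) {
                return;                    // query finished; caller interrupted us
              }
            }
          }
        });
        poller.setDaemon(true);
        poller.start();
        return poller;
      }
    }

In the reverted Commands.java this thread was interrupted once execute() returned, joined with a 10-second timeout, and any remaining log lines were flushed via showRemainingLogsIfAny() before the statement was closed.
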
Modified: hive/branches/spark/bin/ext/beeline.sh
URL: http://svn.apache.org/viewvc/hive/branches/spark/bin/ext/beeline.sh?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/bin/ext/beeline.sh (original)
+++ hive/branches/spark/bin/ext/beeline.sh Mon Oct  6 03:44:13 2014
@@ -19,17 +19,11 @@ export SERVICE_LIST="${SERVICE_LIST}${TH
 
 beeline () {
   CLASS=org.apache.hive.beeline.BeeLine;
-
-  # include only the beeline client jar and its dependencies
-  beelineJarPath=`ls ${HIVE_LIB}/hive-beeline-*.jar`
-  superCsvJarPath=`ls ${HIVE_LIB}/super-csv-*.jar`
-  jlineJarPath=`ls ${HIVE_LIB}/jline-*.jar`
-  jdbcStandaloneJarPath=`ls ${HIVE_LIB}/hive-jdbc-*-standalone.jar`
-  export HADOOP_CLASSPATH=${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}
-
-  exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@"
+  execHiveCmd $CLASS "$@"
 }
 
 beeline_help () {
-  beeline "--help"
+  CLASS=org.apache.hive.beeline.BeeLine;
+  execHiveCmd $CLASS "--help"
 } 
+

Modified: hive/branches/spark/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/pom.xml?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/common/pom.xml (original)
+++ hive/branches/spark/common/pom.xml Mon Oct  6 03:44:13 2014
@@ -72,12 +72,6 @@
     </dependency>
     <!-- test inter-project -->
     <dependency>
-      <groupId>com.google.code.tempus-fugit</groupId>
-      <artifactId>tempus-fugit</artifactId>
-      <version>${tempus-fugit.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>

Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Oct  6 03:44:13 2014
@@ -649,9 +649,6 @@ public class HiveConf extends Configurat
     HIVEJOINCACHESIZE("hive.join.cache.size", 25000,
         "How many rows in the joining tables (except the streaming table) should be cached in memory."),
 
-    // CBO related
-    HIVE_CBO_ENABLED("hive.cbo.enable", false, "Flag to control enabling Cost Based Optimizations using Optiq framework."),
-
     // hive.mapjoin.bucket.cache.size has been replaced by hive.smbjoin.cache.row,
     // need to remove by hive .13. Also, do not change default (see SMB operator)
     HIVEMAPJOINBUCKETCACHESIZE("hive.mapjoin.bucket.cache.size", 100, ""),
@@ -1199,6 +1196,13 @@ public class HiveConf extends Configurat
         "Average row size is computed from average column size of all columns in the row. In the absence\n" +
         "of column statistics and for variable length complex columns like map, the average number of\n" +
         "entries/values can be specified using this config."),
+    // to accurately compute statistics for GROUPBY map side parallelism needs to be known
+    HIVE_STATS_MAP_SIDE_PARALLELISM("hive.stats.map.parallelism", 1,
+        "Hive/Tez optimizer estimates the data size flowing through each of the operators.\n" +
+        "For GROUPBY operator, to accurately compute the data size map-side parallelism needs to\n" +
+        "be known. By default, this value is set to 1 since optimizer is not aware of the number of\n" +
+        "mappers during compile-time. This Hive config can be used to specify the number of mappers\n" +
+        "to be used for data size computation of GROUPBY operator."),
     // statistics annotation fetches stats for each partition, which can be expensive. turning
     // this off will result in basic sizes being fetched from namenode instead
     HIVE_STATS_FETCH_PARTITION_STATS("hive.stats.fetch.partition.stats", true,
@@ -1509,8 +1513,8 @@ public class HiveConf extends Configurat
         "The parent node in ZooKeeper used by HiveServer2 when supporting dynamic service discovery."),
     // HiveServer2 global init file location
     HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION("hive.server2.global.init.file.location", "${env:HIVE_CONF_DIR}",
-        "Either the location of a HS2 global init file or a directory containing a .hiverc file. If the \n" +
-        "property is set, the value must be a valid path to an init file or directory where the init file is located."),
+        "The location of HS2 global init file (.hiverc).\n" +
+        "If the property is reset, the value must be a valid path where the init file is located."),
     HIVE_SERVER2_TRANSPORT_MODE("hive.server2.transport.mode", "binary", new StringSet("binary", "http"),
         "Transport mode of HiveServer2."),
     HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", "",
@@ -1718,9 +1722,6 @@ public class HiveConf extends Configurat
     HIVE_VECTORIZATION_REDUCE_ENABLED("hive.vectorized.execution.reduce.enabled", true,
             "This flag should be set to true to enable vectorized mode of the reduce-side of query execution.\n" +
             "The default value is true."),
-    HIVE_VECTORIZATION_REDUCE_GROUPBY_ENABLED("hive.vectorized.execution.reduce.groupby.enabled", true,
-            "This flag should be set to true to enable vectorized mode of the reduce-side GROUP BY query execution.\n" +
-            "The default value is true."),
     HIVE_VECTORIZATION_GROUPBY_CHECKINTERVAL("hive.vectorized.groupby.checkinterval", 100000,
         "Number of entries added to the group by aggregation hash before a recomputation of average entry size is performed."),
     HIVE_VECTORIZATION_GROUPBY_MAXENTRIES("hive.vectorized.groupby.maxentries", 1000000,

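This hunk restores the hive.stats.map.parallelism knob while dropping the CBO and reduce-side GROUP BY vectorization flags. A minimal sketch of reading and overriding that setting follows; it is not part of the commit, and it uses the generic Configuration accessors that HiveConf inherits rather than the typed ConfVars helpers.

    import org.apache.hadoop.hive.conf.HiveConf;

    // Illustrative only: shows how a caller could inspect and override the
    // map-side parallelism hint declared by HIVE_STATS_MAP_SIDE_PARALLELISM above.
    public class StatsParallelismExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // Defaults to 1, as declared in the restored ConfVars entry.
        int mapParallelism = conf.getInt("hive.stats.map.parallelism", 1);
        System.out.println("map-side parallelism hint: " + mapParallelism);

        // A caller that knows the expected number of mappers can override it so
        // GROUP BY data-size estimates are computed against that figure.
        conf.setInt("hive.stats.map.parallelism", 8);
      }
    }
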
Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java Mon Oct  6 03:44:13 2014
@@ -18,19 +18,10 @@
 
 package org.apache.hadoop.hive.common.type;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
-
-public class TestHiveChar {
-
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
+import junit.framework.TestCase;
+
+public class TestHiveChar extends TestCase {
+
   public void testBasic() {
     HiveChar hc = new HiveChar("abc", 10);
     assertEquals("abc       ", hc.toString());
@@ -56,9 +47,6 @@ public class TestHiveChar {
     assertEquals(3, hc.getCharacterLength());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testStringLength() {
     HiveChar hc = new HiveChar();
 
@@ -72,9 +60,6 @@ public class TestHiveChar {
     assertEquals("0123456789     ", hc.toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testComparison() {
     HiveChar hc1 = new HiveChar();
     HiveChar hc2 = new HiveChar();

Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Mon Oct  6 03:44:13 2014
@@ -20,19 +20,12 @@ package org.apache.hadoop.hive.common.ty
 import java.math.BigDecimal;
 import java.math.BigInteger;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
+import org.junit.Assert;
+import org.junit.Test;
 
 public class TestHiveDecimal {
 
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testPrecisionScaleEnforcement() {
     String decStr = "1786135888657847525803324040144343378.09799306448796128931113691624";
     HiveDecimal dec = HiveDecimal.create(decStr);
@@ -89,8 +82,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testMultiply() {
     HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
     HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
@@ -114,8 +105,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testPow() {
     HiveDecimal dec = HiveDecimal.create("3.00001415926");
     Assert.assertEquals(dec.pow(2), dec.multiply(dec));
@@ -129,8 +118,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testDivide() {
     HiveDecimal dec1 = HiveDecimal.create("3.14");
     HiveDecimal dec2 = HiveDecimal.create("3");
@@ -146,8 +133,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testPlus() {
     HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
     HiveDecimal dec2 = HiveDecimal.create("1");
@@ -160,8 +145,6 @@ public class TestHiveDecimal {
 
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testSubtract() {
       HiveDecimal dec1 = HiveDecimal.create("3.140");
       HiveDecimal dec2 = HiveDecimal.create("1.00");
@@ -169,8 +152,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testPosMod() {
     HiveDecimal hd1 = HiveDecimal.create("-100.91");
     HiveDecimal hd2 = HiveDecimal.create("9.8");
@@ -179,16 +160,12 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testHashCode() {
       Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
       Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testException() {
     HiveDecimal dec = HiveDecimal.create("3.1415.926");
     Assert.assertNull(dec);
@@ -197,8 +174,6 @@ public class TestHiveDecimal {
   }
 
   @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testBinaryConversion() {
     testBinaryConversion("0.00");
     testBinaryConversion("-12.25");

Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java Mon Oct  6 03:44:13 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.common.type;
 
+import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.LogUtils;
@@ -27,15 +28,8 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.Random;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
-
-public class TestHiveVarchar {
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
+public class TestHiveVarchar extends TestCase {
   public TestHiveVarchar() {
     super();
   }
@@ -71,9 +65,6 @@ public class TestHiveVarchar {
     }
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testStringLength() throws Exception {
     int strLen = 20;
     int[] lengths = { 15, 20, 25 };
@@ -133,9 +124,6 @@ public class TestHiveVarchar {
     assertEquals(5, vc1.getCharacterLength());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveVarchar hc1 = new HiveVarchar("abcd", 20);
     HiveVarchar hc2 = new HiveVarchar("abcd", 20);

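The three type tests above (TestHiveChar, TestHiveDecimal, TestHiveVarchar) are reverted from JUnit 4 with tempus-fugit concurrency rules back to junit.framework.TestCase. For context, a minimal sketch of the style being removed, assembled from the rules and annotations visible in the deleted lines; the test body mirrors TestHiveChar.testBasic and is illustrative only.

    import static org.junit.Assert.assertEquals;

    import com.google.code.tempusfugit.concurrency.ConcurrentRule;
    import com.google.code.tempusfugit.concurrency.RepeatingRule;
    import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
    import com.google.code.tempusfugit.concurrency.annotations.Repeating;
    import org.apache.hadoop.hive.common.type.HiveChar;
    import org.junit.Rule;
    import org.junit.Test;

    // Illustrative sketch of the tempus-fugit style being reverted: each @Test
    // method is run by 4 threads concurrently and repeated 100 times, which is
    // how the trunk tests exercised thread safety of the common type classes.
    public class ConcurrentStyleExample {

      @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
      @Rule public RepeatingRule repeatingRule = new RepeatingRule();

      @Test
      @Concurrent(count = 4)
      @Repeating(repetition = 100)
      public void padsToDeclaredLength() {
        HiveChar hc = new HiveChar("abc", 10);     // same input as TestHiveChar above
        assertEquals("abc       ", hc.toString()); // padded out to 10 characters
      }
    }
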
Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -33,8 +32,6 @@ import org.apache.hadoop.hive.ql.exec.UD
  * more efficient.
  * 
  */
-@Description(name = "example_avg",
-value = "_FUNC_(col) - Example UDAF to compute average")
 public final class UDAFExampleAvg extends UDAF {
 
   /**