Posted to commits@hive.apache.org by br...@apache.org on 2014/08/24 05:43:57 UTC

svn commit: r1620103 [1/27] - in /hive/branches/spark: ./ accumulo-handler/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ data/files/ hcatalog/streami...

Author: brock
Date: Sun Aug 24 03:43:48 2014
New Revision: 1620103

URL: http://svn.apache.org/r1620103
Log:
Merge from trunk into spark

Added:
    hive/branches/spark/accumulo-handler/
      - copied from r1619026, hive/trunk/accumulo-handler/
    hive/branches/spark/data/files/extrapolate_stats_full.txt
      - copied unchanged from r1620102, hive/trunk/data/files/extrapolate_stats_full.txt
    hive/branches/spark/data/files/extrapolate_stats_partial.txt
      - copied unchanged from r1620102, hive/trunk/data/files/extrapolate_stats_partial.txt
    hive/branches/spark/data/files/kv10.txt
      - copied unchanged from r1619026, hive/trunk/data/files/kv10.txt
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java
      - copied unchanged from r1618940, hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataSerializer.java
      - copied unchanged from r1618940, hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataSerializer.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/
      - copied from r1619026, hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/
    hive/branches/spark/metastore/scripts/upgrade/derby/019-HIVE-7784.derby.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/derby/019-HIVE-7784.derby.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/002-HIVE-7784.mssql.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/mssql/002-HIVE-7784.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mysql/019-HIVE-7784.mysql.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/mysql/019-HIVE-7784.mysql.sql
    hive/branches/spark/metastore/scripts/upgrade/oracle/020-HIVE-7784.oracle.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/oracle/020-HIVE-7784.oracle.sql
    hive/branches/spark/metastore/scripts/upgrade/postgres/019-HIVE-7784.postgres.sql
      - copied unchanged from r1619596, hive/trunk/metastore/scripts/upgrade/postgres/019-HIVE-7784.postgres.sql
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
      - copied unchanged from r1620102, hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
      - copied unchanged from r1620102, hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/LinearExtrapolatePartStatus.java
      - copied unchanged from r1620102, hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/LinearExtrapolatePartStatus.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
      - copied unchanged from r1619487, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
    hive/branches/spark/ql/src/test/queries/clientnegative/create_view_failure10.q
      - copied unchanged from r1619487, hive/trunk/ql/src/test/queries/clientnegative/create_view_failure10.q
    hive/branches/spark/ql/src/test/queries/clientpositive/analyze_tbl_part.q
      - copied unchanged from r1620102, hive/trunk/ql/src/test/queries/clientpositive/analyze_tbl_part.q
    hive/branches/spark/ql/src/test/queries/clientpositive/decimal_trailing.q
      - copied unchanged from r1619026, hive/trunk/ql/src/test/queries/clientpositive/decimal_trailing.q
    hive/branches/spark/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
      - copied unchanged from r1620102, hive/trunk/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
    hive/branches/spark/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
      - copied unchanged from r1620102, hive/trunk/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
    hive/branches/spark/ql/src/test/queries/clientpositive/parquet_join.q
      - copied unchanged from r1619487, hive/trunk/ql/src/test/queries/clientpositive/parquet_join.q
    hive/branches/spark/ql/src/test/queries/clientpositive/tez_union_decimal.q
      - copied unchanged from r1619487, hive/trunk/ql/src/test/queries/clientpositive/tez_union_decimal.q
    hive/branches/spark/ql/src/test/results/clientnegative/create_view_failure10.q.out
      - copied unchanged from r1619487, hive/trunk/ql/src/test/results/clientnegative/create_view_failure10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
      - copied unchanged from r1620102, hive/trunk/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_trailing.q.out
      - copied unchanged from r1619026, hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
      - copied unchanged from r1620102, hive/trunk/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
      - copied unchanged from r1620102, hive/trunk/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out
      - copied, changed from r1619487, hive/trunk/ql/src/test/results/clientpositive/parquet_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union_decimal.q.out
      - copied unchanged from r1619487, hive/trunk/ql/src/test/results/clientpositive/tez/tez_union_decimal.q.out
Removed:
    hive/branches/spark/ql/src/test/queries/clientnegative/parquet_char.q
    hive/branches/spark/ql/src/test/queries/clientnegative/parquet_varchar.q
    hive/branches/spark/ql/src/test/results/clientnegative/parquet_char.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/parquet_timestamp.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/parquet_varchar.q.out
Modified:
    hive/branches/spark/   (props changed)
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/branches/spark/data/files/parquet_types.txt
    hive/branches/spark/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatAddPartitionDesc.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/spark/itests/qtest/pom.xml
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties
    hive/branches/spark/itests/util/pom.xml
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
    hive/branches/spark/metastore/if/hive_metastore.thrift
    hive/branches/spark/metastore/scripts/upgrade/derby/hive-schema-0.14.0.derby.sql
    hive/branches/spark/metastore/scripts/upgrade/derby/upgrade-0.13.0-to-0.14.0.derby.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade.order.mssql
    hive/branches/spark/metastore/scripts/upgrade/mysql/hive-schema-0.14.0.mysql.sql
    hive/branches/spark/metastore/scripts/upgrade/mysql/upgrade-0.13.0-to-0.14.0.mysql.sql
    hive/branches/spark/metastore/scripts/upgrade/oracle/hive-schema-0.14.0.oracle.sql
    hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql
    hive/branches/spark/metastore/scripts/upgrade/postgres/hive-schema-0.14.0.postgres.sql
    hive/branches/spark/metastore/scripts/upgrade/postgres/upgrade-0.13.0-to-0.14.0.postgres.sql
    hive/branches/spark/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
    hive/branches/spark/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
    hive/branches/spark/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
    hive/branches/spark/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
    hive/branches/spark/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
    hive/branches/spark/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
    hive/branches/spark/metastore/src/gen/thrift/gen-php/metastore/Types.php
    hive/branches/spark/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
    hive/branches/spark/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
    hive/branches/spark/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
    hive/branches/spark/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
    hive/branches/spark/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
    hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
    hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
    hive/branches/spark/packaging/pom.xml
    hive/branches/spark/pom.xml
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/AcidOutputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/RecordIdentifier.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerWriterV2.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SerializationUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ArrayWritableGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLag.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLead.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_rename_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/literal_decimal.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_decimal.q
    hive/branches/spark/ql/src/test/queries/clientpositive/parquet_types.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing.q
    hive/branches/spark/ql/src/test/results/clientnegative/authorization_create_func1.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/sortmerge_mapjoin_mismatch_1.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_assert_true.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_assert_true2.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_local_resource.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/allcolref_in_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_rename_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_create_func1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join28.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join29.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join32.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_smb_mapjoin_14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/auto_sortmerge_join_13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal_native.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/char_pad_convert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cluster.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/column_access_stats.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/constprog2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cross_product_check_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cross_product_check_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_precision.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/explain_logical.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/index_auto_self_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/innerjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join23.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join26.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join27.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join32.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join32_lessSize.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join33.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join36.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join37.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join38.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join39.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join40.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join41.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_alt_syntax.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_filters_overlap.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_hive_626.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_map_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_nullsafe.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_rc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_reorder.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_reorder2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_reorder3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_reorder4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_star.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_thrift.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_vc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_cp.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_outer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/limit_pushdown_negative.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lineage1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/literal_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/louter_join_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mapjoin1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mergejoins.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/mergejoins_mixed.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/multiMapJoin1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/multi_join_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/no_hooks.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nonmr_fetch.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/optional_outer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_ppd_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/outer_join_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_decimal1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/pcr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_join5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_join_filter.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_outer_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_outer_join2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_outer_join3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_outer_join5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_udf_case.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_vc.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/quotedid_skew.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/regex_col.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/router_join_ppr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/sample8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt16.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt18.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoinopt8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin_7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notexists.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table_windowing_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/auto_join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/join1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/mrr.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_join_tests.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_joins_explain.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_data_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_case.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_using.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_when.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_json_tuple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_stack.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union20.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union22.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union24.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union26.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union27.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union32.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_12.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_13.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_remove_14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_top_level.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_between_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_data_types.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_bucketmapjoin1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_context.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_nested_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_rank.q.out
    hive/branches/spark/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/cast1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/groupby6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input20.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input7.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input8.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input9.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input_part1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join7.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/subq.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml
    hive/branches/spark/ql/src/test/results/compiler/plan/union.q.xml
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaResolutionProblem.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestGenericAvroRecordWritable.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestSchemaReEncoder.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestThatEvolvedSchemasActAsWeWant.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/CLIService.java

Propchange: hive/branches/spark/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1618297-1618940,1618942-1619026,1619028-1619487,1619489-1619596,1619598-1620102

Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java Sun Aug 24 03:43:48 2014
@@ -108,7 +108,7 @@ public class GenHiveTemplate extends Tas
         continue;
       }
       Element property = appendElement(root, "property", null);
-      appendElement(property, "key", confVars.varname);
+      appendElement(property, "name", confVars.varname);
       appendElement(property, "value", confVars.getDefaultExpr());
       appendElement(property, "description", normalize(confVars.getDescription()));
       // wish to add new line here.
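
The one-line rename above matters because Hadoop-style configuration files are parsed by matching <name>/<value> pairs inside each <property> element, so an entry emitted as <key> would be ignored by Configuration. Below is a minimal, self-contained sketch of the layout the patched template generator now emits; the appendElement helper is a hypothetical stand-in mirroring the one in GenHiveTemplate, and the property name, value, and description are illustrative only.

    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.transform.stream.StreamResult;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class TemplateEntrySketch {

      // Hypothetical stand-in for GenHiveTemplate.appendElement: creates a
      // child element under parent, with optional text content, and returns it.
      static Element appendElement(Element parent, String tag, String text) {
        Element child = parent.getOwnerDocument().createElement(tag);
        if (text != null) {
          child.setTextContent(text);
        }
        parent.appendChild(child);
        return child;
      }

      public static void main(String[] args) throws Exception {
        Document doc =
            DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        Element root = (Element) doc.appendChild(doc.createElement("configuration"));

        Element property = appendElement(root, "property", null);
        // Hadoop's Configuration parser matches <name>, not <key>; the
        // rename in the hunk above makes the generated template loadable.
        appendElement(property, "name", "hive.sample.conf.var");      // illustrative
        appendElement(property, "value", "default");                  // illustrative
        appendElement(property, "description", "sample description"); // illustrative

        TransformerFactory.newInstance().newTransformer()
            .transform(new DOMSource(doc), new StreamResult(System.out));
      }
    }

Running the sketch prints a <configuration> document whose entries use <name>, matching what Hadoop's Configuration loader expects.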

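The Decimal128 changes below introduce an actualScale field that records how many fractional digits the input value actually carried, alongside the declared scale of the type; getHiveDecimalString() then renders the fractional part from actualScale instead of stripping all trailing zeros. A minimal usage sketch of the documented behavior follows; the expected outputs are inferred from the javadoc and comments added in this diff, not from a test in this commit.

    import org.apache.hadoop.hive.common.type.Decimal128;

    public class ActualScaleSketch {
      public static void main(String[] args) {
        Decimal128 d = new Decimal128();

        // update(double, short): the declared scale is 4, but 1.25 carries
        // only two fractional digits, so actualScale is detected as 2 and
        // getHiveDecimalString() should not pad with trailing zeros.
        d.update(1.25, (short) 4);
        System.out.println(d.getHiveDecimalString()); // expected: 1.25 (not 1.2500)

        // update(long, short): integers carry no fractional digits, so the
        // diff sets actualScale to 0 and no fractional part is printed.
        d.update(42L, (short) 4);
        System.out.println(d.getHiveDecimalString()); // expected: 42
      }
    }
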
Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java Sun Aug 24 03:43:48 2014
@@ -103,6 +103,13 @@ public final class Decimal128 extends Nu
   private short scale;
 
   /**
+   * This is the actual scale detected from the value passed to this Decimal128.
+   * The value is always equal to or less than #scale. It is used to return the correct
+   * decimal string from {@link #getHiveDecimalString()}.
+   */
+  private short actualScale;
+
+  /**
    * -1 means negative, 0 means zero, 1 means positive.
    *
    * @serial
@@ -127,6 +134,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128();
     this.scale = 0;
     this.signum = 0;
+    this.actualScale = 0;
   }
 
   /**
@@ -139,6 +147,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
+    this.actualScale = o.actualScale;
   }
 
   /**
@@ -178,6 +187,7 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.unscaledValue = new UnsignedInt128(unscaledVal);
     this.scale = scale;
+    this.actualScale = scale;
     if (unscaledValue.isZero()) {
       this.signum = 0;
     } else {
@@ -264,6 +274,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue.update(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
+    this.actualScale = o.actualScale;
     return this;
   }
 
@@ -292,7 +303,7 @@ public final class Decimal128 extends Nu
 
   /**
    * Update the value of this object with the given {@code long} with the given
-   * scal.
+   * scale.
    *
    * @param val
    *          {@code long} value to be set to {@code Decimal128}.
@@ -314,6 +325,8 @@ public final class Decimal128 extends Nu
     if (scale != 0) {
       changeScaleDestructive(scale);
     }
+    // set actualScale to 0 because there are no fractional digits in integer values
+    this.actualScale = 0;
     return this;
   }
 
@@ -341,6 +354,11 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.scale = scale;
 
+    // Obtains the scale of the double value to keep a record of the original
+    // scale. This will be used to print the HiveDecimal string with the
+    // correct value scale.
+    this.actualScale = (short) BigDecimal.valueOf(val).scale();
+
     // Translate the double into sign, exponent and significand, according
     // to the formulae in JLS, Section 20.10.22.
     long valBits = Double.doubleToLongBits(val);
@@ -364,6 +382,10 @@ public final class Decimal128 extends Nu
       exponent++;
     }
 
+    // Calculate the real number of fractional digits from the double value
+    this.actualScale -= (exponent > 0) ? exponent : 0;
+    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
+
     // so far same as java.math.BigDecimal, but the scaling below is
     // specific to ANSI SQL Numeric.
 
@@ -426,6 +448,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update(IntBuffer buf, int precision) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(buf, precision);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -442,6 +465,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -458,6 +482,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -474,6 +499,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -490,6 +516,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -510,6 +537,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update(int[] array, int offset, int precision) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(array, offset + 1, precision);
     return this;
@@ -527,6 +555,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(array, offset + 1);
     return this;
@@ -544,6 +573,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(array, offset + 1);
     return this;
@@ -561,6 +591,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(array, offset + 1);
     return this;
@@ -578,6 +609,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(array, offset + 1);
     return this;
@@ -600,7 +632,6 @@ public final class Decimal128 extends Nu
    * @param scale
    */
   public Decimal128 update(BigInteger bigInt, short scale) {
-    this.scale = scale;
     this.signum = (byte) bigInt.compareTo(BigInteger.ZERO);
     if (signum == 0) {
       update(0);
@@ -609,6 +640,9 @@ public final class Decimal128 extends Nu
     } else {
       unscaledValue.update(bigInt);
     }
+    this.scale = scale;
+    this.actualScale = scale;
+
     return this;
   }
 
@@ -731,6 +765,9 @@ public final class Decimal128 extends Nu
       this.unscaledValue.addDestructive(accumulated);
     }
 
+    this.actualScale = (short) (fractionalDigits - exponent);
+    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
+
     int scaleAdjust = scale - fractionalDigits + exponent;
     if (scaleAdjust > 0) {
       this.unscaledValue.scaleUpTenDestructive((short) scaleAdjust);
@@ -924,6 +961,7 @@ public final class Decimal128 extends Nu
       this.unscaledValue.scaleUpTenDestructive((short) -scaleDown);
     }
     this.scale = scale;
+    this.actualScale = scale;
 
     this.unscaledValue.throwIfExceedsTenToThirtyEight();
   }
@@ -1125,6 +1163,7 @@ public final class Decimal128 extends Nu
     if (this.signum == 0 || right.signum == 0) {
       this.zeroClear();
       this.scale = newScale;
+      this.actualScale = newScale;
       return;
     }
 
@@ -1154,6 +1193,7 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
+    this.actualScale = newScale;
     this.signum = (byte) (this.signum * right.signum);
     if (this.unscaledValue.isZero()) {
       this.signum = 0; // because of scaling down, this could happen
@@ -1244,6 +1284,7 @@ public final class Decimal128 extends Nu
     }
     if (this.signum == 0) {
       this.scale = newScale;
+      this.actualScale = newScale;
       remainder.update(this);
       return;
     }
@@ -1271,6 +1312,7 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
+    this.actualScale = newScale;
     this.signum = (byte) (this.unscaledValue.isZero() ? 0
         : (this.signum * right.signum));
     remainder.scale = scale;
@@ -1731,17 +1773,13 @@ public final class Decimal128 extends Nu
   private int [] tmpArray = new int[2];
 
   /**
-   * Returns the string representation of this value. It discards the trailing zeros
-   * in the fractional part to match the HiveDecimal's string representation. However,
+   * Returns the string representation of this value. It preserves the fractional
+   * part at the original {@code actualScale} the value was created with. However,
    * don't use this string representation for the reconstruction of the object.
    *
    * @return string representation of this value
    */
   public String getHiveDecimalString() {
-    if (this.signum == 0) {
-      return "0";
-    }
-
     StringBuilder buf = new StringBuilder(50);
     if (this.signum < 0) {
       buf.append('-');
@@ -1752,32 +1790,40 @@ public final class Decimal128 extends Nu
     int trailingZeros = tmpArray[1];
     int numIntegerDigits = unscaledLength - this.scale;
     if (numIntegerDigits > 0) {
-
       // write out integer part first
       // then write out fractional part
       for (int i=0; i < numIntegerDigits; i++) {
         buf.append(unscaled[i]);
       }
 
-      if (this.scale > trailingZeros) {
+      if (this.actualScale > 0) {
         buf.append('.');
-        for (int i = numIntegerDigits; i < (unscaledLength - trailingZeros); i++) {
+
+        if (trailingZeros > this.actualScale) {
+          for (int i=0; i < (trailingZeros - this.scale); i++) {
+            buf.append("0");
+          }
+        }
+
+        for (int i = numIntegerDigits; i < (numIntegerDigits + this.actualScale); i++) {
           buf.append(unscaled[i]);
         }
       }
     } else {
-
       // no integer part
       buf.append('0');
 
-      if (this.scale > trailingZeros) {
-
+      if (this.actualScale > 0) {
         // fractional part follows, possibly starting with zeros
         buf.append('.');
-        for (int i = unscaledLength; i < this.scale; ++i) {
-          buf.append('0');
+
+        if (this.actualScale > trailingZeros) {
+          for (int i = unscaledLength; i < this.scale; ++i) {
+            buf.append('0');
+          }
         }
-        for (int i = 0; i < (unscaledLength - trailingZeros); i++) {
+
+        for (int i = 0; i < (numIntegerDigits + this.actualScale); i++) {
           buf.append(unscaled[i]);
         }
       }
@@ -1836,9 +1882,10 @@ public final class Decimal128 extends Nu
 
   @Override
   public String toString() {
-    return toFormalString() + "(Decimal128: scale=" + scale + ", signum="
-        + signum + ", BigDecimal.toString=" + toBigDecimal().toString()
-        + ", unscaledValue=[" + unscaledValue.toString() + "])";
+    return toFormalString() + "(Decimal128: scale=" + scale + ", actualScale="
+        + this.actualScale + ", signum=" + signum + ", BigDecimal.toString="
+        + toBigDecimal().toString() + ", unscaledValue=[" + unscaledValue.toString()
+        + "])";
   }
 
   /**
@@ -1956,6 +2003,7 @@ public final class Decimal128 extends Nu
    */
   public Decimal128 fastUpdateFromInternalStorage(byte[] internalStorage, short scale) {
     this.scale = scale;
+    this.actualScale = scale;
     this.signum = this.unscaledValue.fastUpdateFromInternalStorage(internalStorage);
 
     return this;

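For context on the actualScale field threaded through the updates above: it
records how many fractional digits the value was created with, so trailing
zeros now survive into getHiveDecimalString(). A minimal sketch (not part of
this patch), using only the constructor and expectations exercised in the
TestDecimal128 changes below:

    import org.apache.hadoop.hive.common.type.Decimal128;

    public class ActualScaleSketch {
      public static void main(String[] args) {
        // Declared scale is 15, but the literal carries 11 fractional digits,
        // three of them trailing zeros; actualScale remembers all 11.
        Decimal128 d = new Decimal128("0.00923076000", (short) 15);
        // Previously the zeros were trimmed to "0.00923076"; with actualScale
        // the original fractional part is preserved:
        System.out.println(d.getHiveDecimalString());  // 0.00923076000
      }
    }
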
Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Sun Aug 24 03:43:48 2014
@@ -30,7 +30,6 @@ import java.math.RoundingMode;
 public class HiveDecimal implements Comparable<HiveDecimal> {
   public static final int MAX_PRECISION = 38;
   public static final int MAX_SCALE = 38;
-
   /**
    * Default precision/scale when user doesn't specify in the column metadata, such as
    * decimal and decimal(8).
@@ -113,7 +112,7 @@ public class HiveDecimal implements Comp
 
   @Override
   public int hashCode() {
-    return bd.hashCode();
+    return trim(bd).hashCode();
   }
 
   @Override
@@ -169,7 +168,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal multiply(HiveDecimal dec) {
-    return create(bd.multiply(dec.bd), false);
+    return create(bd.multiply(dec.bd), true);
   }
 
   public BigInteger unscaledValue() {
@@ -202,7 +201,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal divide(HiveDecimal dec) {
-    return create(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP), true);
+    return create(trim(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP)), true);
   }
 
   /**
@@ -232,8 +231,6 @@ public class HiveDecimal implements Comp
       return null;
     }
 
-    bd = trim(bd);
-
     int intDigits = bd.precision() - bd.scale();
 
     if (intDigits > MAX_PRECISION) {
@@ -244,8 +241,6 @@ public class HiveDecimal implements Comp
     if (bd.scale() > maxScale ) {
       if (allowRounding) {
         bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
-        // Trimming is again necessary, because rounding may introduce new trailing 0's.
-        bd = trim(bd);
       } else {
         bd = null;
       }
@@ -259,8 +254,6 @@ public class HiveDecimal implements Comp
       return null;
     }
 
-    bd = trim(bd);
-
     int maxIntDigits = maxPrecision - maxScale;
     int intDigits = bd.precision() - bd.scale();
     if (intDigits > maxIntDigits) {

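Taken together, the HiveDecimal changes above stop trimming trailing zeros at
creation time, let multiply() keep the full result scale, trim only the
MAX_SCALE padding that divide() introduces, and trim inside hashCode() so that
numerically equal values still hash alike. A rough sketch of the resulting
behavior (asserting nothing beyond what the updated tests below check):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class TrailingZeroSketch {
      public static void main(String[] args) {
        HiveDecimal a = HiveDecimal.create("3.140");
        HiveDecimal b = HiveDecimal.create("1.00");
        System.out.println(a.multiply(b));  // 3.14000 - scales add, zeros kept
        System.out.println(a.divide(b));    // 3.14 - MAX_SCALE padding trimmed
        // hashCode() trims first, so 9 and 9.00 agree:
        System.out.println(HiveDecimal.create("9").hashCode()
            == HiveDecimal.create("9.00").hashCode());  // true
      }
    }
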
Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java Sun Aug 24 03:43:48 2014
@@ -811,7 +811,7 @@ public class TestDecimal128 {
     assertEquals("0.00923076923", d2.getHiveDecimalString());
 
     Decimal128 d3 = new Decimal128("0.00923076000", (short) 15);
-    assertEquals("0.00923076", d3.getHiveDecimalString());
+    assertEquals("0.00923076000", d3.getHiveDecimalString());
 
     Decimal128 d4 = new Decimal128("4294967296.01", (short) 15);
     assertEquals("4294967296.01", d4.getHiveDecimalString());
@@ -849,15 +849,37 @@ public class TestDecimal128 {
     d11.update(hd6.bigDecimalValue());
     assertEquals(hd6.toString(), d11.getHiveDecimalString());
 
+    // Trailing zeros in a double literal are not preserved, because the
+    // double type has no notion of display scale
     Decimal128 d12 = new Decimal128(27.000, (short)3);
-    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000"));
+    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.0"));
     assertEquals(hd7.toString(), d12.getHiveDecimalString());
-    assertEquals("27", d12.getHiveDecimalString());
+    assertEquals("27.0", d12.getHiveDecimalString());
 
     Decimal128 d13 = new Decimal128(1234123000, (short)3);
     HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
     assertEquals(hd8.toString(), d13.getHiveDecimalString());
     assertEquals("1234123000", d13.getHiveDecimalString());
+
+    Decimal128 d14 = new Decimal128(1.33e4, (short)10);
+    HiveDecimal hd9 = HiveDecimal.create(new BigDecimal("1.33e4"));
+    assertEquals(hd9.toString(), d14.getHiveDecimalString());
+    assertEquals("13300", d14.getHiveDecimalString());
+
+    Decimal128 d15 = new Decimal128(1.33e-4, (short)10);
+    HiveDecimal hd10 = HiveDecimal.create(new BigDecimal("1.33e-4"));
+    assertEquals(hd10.toString(), d15.getHiveDecimalString());
+    assertEquals("0.000133", d15.getHiveDecimalString());
+
+    Decimal128 d16 = new Decimal128("1.33e4", (short)10);
+    HiveDecimal hd11 = HiveDecimal.create(new BigDecimal("1.33e4"));
+    assertEquals(hd11.toString(), d16.getHiveDecimalString());
+    assertEquals("13300", d16.getHiveDecimalString());
+
+    Decimal128 d17 = new Decimal128("1.33e-4", (short)10);
+    HiveDecimal hd12 = HiveDecimal.create(new BigDecimal("1.33e-4"));
+    assertEquals(hd12.toString(), d17.getHiveDecimalString());
+    assertEquals("0.000133", d17.getHiveDecimalString());
   }
 
   @Test

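The d12/d14/d15 cases above hinge on a Java detail rather than on Decimal128
itself: a double literal cannot carry trailing zeros, so 27.000 reaches the
constructor as the double 27.0. A small standalone illustration:

    import java.math.BigDecimal;

    public class DoubleScaleSketch {
      public static void main(String[] args) {
        System.out.println(Double.toString(27.000));     // 27.0 - zeros already gone
        System.out.println(BigDecimal.valueOf(27.000));  // 27.0 - built from the same double
        System.out.println(new BigDecimal("27.000"));    // 27.000 - a String keeps the scale
      }
    }
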
Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Sun Aug 24 03:43:48 2014
@@ -50,25 +50,35 @@ public class TestHiveDecimal {
     Assert.assertEquals("-1786135888657847525803324040144343378.1", dec.toString());
 
     dec = HiveDecimal.create("005.34000");
-    Assert.assertEquals(dec.precision(), 3);
-    Assert.assertEquals(dec.scale(), 2);
+    Assert.assertEquals(dec.precision(), 6);
+    Assert.assertEquals(dec.scale(), 5);
 
     dec = HiveDecimal.create("178613588865784752580332404014434337809799306448796128931113691624");
     Assert.assertNull(dec);
-  }
 
-  @Test
-  public void testTrailingZeroRemovalAfterEnforcement() {
-    String decStr = "8.090000000000000000000000000000000000000123456";
-    HiveDecimal dec = HiveDecimal.create(decStr);
-    Assert.assertEquals("8.09", dec.toString());
+    // Trailing zeros are left intact
+    Assert.assertEquals("0.0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0"), 2, 1).toString());
+    Assert.assertEquals("0.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.00"), 3, 2).toString());
+    Assert.assertEquals("0.0000", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0000"), 10, 4).toString());
+    Assert.assertEquals("100.00000", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00000"), 15, 5).toString());
+    Assert.assertEquals("100.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00"), 15, 5).toString());
+
+    // Rounding numbers
+    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.012"), 3, 2).toString());
+    Assert.assertEquals("0.02", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.015"), 3, 2).toString());
+    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0145"), 3, 2).toString());
+
+    // Integers with no fractional part are returned unmodified ("0" stays "0", not null)
+    Assert.assertEquals("0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0"), 1, 0).toString());
+    Assert.assertEquals("30", HiveDecimal.enforcePrecisionScale(new BigDecimal("30"), 2, 0).toString());
+    Assert.assertEquals("5", HiveDecimal.enforcePrecisionScale(new BigDecimal("5"), 3, 2).toString());
   }
 
   @Test
   public void testMultiply() {
     HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
     HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
-    Assert.assertNull(dec1.multiply(dec2));
+    Assert.assertNotNull(dec1.multiply(dec2));
 
     dec1 = HiveDecimal.create("178613588865784752580323232232323444.4");
     dec2 = HiveDecimal.create("178613588865784752580302323232.3");
@@ -77,6 +87,14 @@ public class TestHiveDecimal {
     dec1 = HiveDecimal.create("47.324");
     dec2 = HiveDecimal.create("9232.309");
     Assert.assertEquals("436909.791116", dec1.multiply(dec2).toString());
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("3.14000", dec1.multiply(dec2).toString());
+
+    dec1 = HiveDecimal.create("43.010");
+    dec2 = HiveDecimal.create("2");
+    Assert.assertEquals("86.020", dec1.multiply(dec2).toString());
   }
 
   @Test
@@ -87,6 +105,9 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("0.000017861358882");
     dec1 = dec1.pow(3);
     Assert.assertNull(dec1);
+
+    dec1 = HiveDecimal.create("3.140");
+    Assert.assertEquals("9.859600", dec1.pow(2).toString());
   }
 
   @Test
@@ -94,6 +115,14 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("3.14");
     HiveDecimal dec2 = HiveDecimal.create("3");
     Assert.assertNotNull(dec1.divide(dec2));
+
+    dec1 = HiveDecimal.create("15");
+    dec2 = HiveDecimal.create("5");
+    Assert.assertEquals("3", dec1.divide(dec2).toString());
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("3.14", dec1.divide(dec2).toString());
   }
 
   @Test
@@ -101,6 +130,18 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
     HiveDecimal dec2 = HiveDecimal.create("1");
     Assert.assertNotNull(dec1.add(dec2));
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("4.140", dec1.add(dec2).toString());
+  }
+
+
+  @Test
+  public void testSubtract() {
+    HiveDecimal dec1 = HiveDecimal.create("3.140");
+    HiveDecimal dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("2.140", dec1.subtract(dec2).toString());
   }
 
   @Test
@@ -112,6 +153,12 @@ public class TestHiveDecimal {
   }
 
   @Test
+  public void testHashCode() {
+      Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
+      Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
+  }
+
+  @Test
   public void testException() {
     HiveDecimal dec = HiveDecimal.create("3.1415.926");
     Assert.assertNull(dec);
@@ -121,7 +168,7 @@ public class TestHiveDecimal {
 
   @Test
   public void testBinaryConversion() {
-    testBinaryConversion("0.0");
+    testBinaryConversion("0.00");
     testBinaryConversion("-12.25");
     testBinaryConversion("234.79");
   }

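The new assertions above pin down the enforcePrecisionScale() contract: values
that already fit are returned untouched (trailing zeros included), rounding
applies only when the scale limit is exceeded, and plain integers pass through
as-is. A sketch of that contract, assuming only the signature used in the test:

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class EnforceSketch {
      public static void main(String[] args) {
        // Fits within (15, 5): returned unchanged, zeros and all.
        System.out.println(HiveDecimal.enforcePrecisionScale(
            new BigDecimal("100.00000"), 15, 5));  // 100.00000
        // Scale 4 exceeds max 2: rounded half-up to two places.
        System.out.println(HiveDecimal.enforcePrecisionScale(
            new BigDecimal("0.0145"), 3, 2));      // 0.01
      }
    }
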
Modified: hive/branches/spark/data/files/parquet_types.txt
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/files/parquet_types.txt?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/data/files/parquet_types.txt (original)
+++ hive/branches/spark/data/files/parquet_types.txt Sun Aug 24 03:43:48 2014
@@ -1,21 +1,21 @@
-100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111
-101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222
-102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333
-103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444
-104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555
-105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666
-106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777
-107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888
-108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999
-109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101
-110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111
-111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121
-112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131
-113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141
-114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151
-115|1|1|1.0|4.5|tuv|2026-04-04 16:16:16.161616161
-116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171
-117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181
-118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191
-119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202
-120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212
+100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  
+101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab  
+102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc
+103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd
+104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde
+105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef
+106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg
+107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh
+108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|abcdefghijklmnop
+109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef
+110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede
+111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded
+112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd
+113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc
+114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b
+115|1|1|1.0|4.5|tuv|2026-04-04 16:16:16.161616161|rstuv|abcded
+116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded
+117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded
+118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede
+119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede
+120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde

Modified: hive/branches/spark/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java (original)
+++ hive/branches/spark/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java Sun Aug 24 03:43:48 2014
@@ -53,7 +53,7 @@ abstract class AbstractRecordWriter impl
   private int currentBucketId = 0;
   private final Path partitionPath;
 
-  final AcidOutputFormat<?> outf;
+  final AcidOutputFormat<?,?> outf;
 
   protected AbstractRecordWriter(HiveEndPoint endPoint, HiveConf conf)
           throws ConnectionError, StreamingException {
@@ -70,7 +70,7 @@ abstract class AbstractRecordWriter impl
                 + endPoint);
       }
       String outFormatName = this.tbl.getSd().getOutputFormat();
-      outf = (AcidOutputFormat<?>) ReflectionUtils.newInstance(Class.forName(outFormatName), conf);
+      outf = (AcidOutputFormat<?,?>) ReflectionUtils.newInstance(Class.forName(outFormatName), conf);
     } catch (MetaException e) {
       throw new ConnectionError(endPoint, e);
     } catch (NoSuchObjectException e) {

Modified: hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatAddPartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatAddPartitionDesc.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatAddPartitionDesc.java (original)
+++ hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatAddPartitionDesc.java Sun Aug 24 03:43:48 2014
@@ -18,18 +18,8 @@
  */
 package org.apache.hive.hcatalog.api;
 
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,16 +30,33 @@ import org.slf4j.LoggerFactory;
 public class HCatAddPartitionDesc {
 
   private static final Logger LOG = LoggerFactory.getLogger(HCatAddPartitionDesc.class);
-  private String tableName;
-  private String dbName;
-  private String location;
-  private Map<String, String> partSpec;
+  private HCatPartition hcatPartition;
 
-  private HCatAddPartitionDesc(String dbName, String tbl, String loc, Map<String, String> spec) {
+  // The following data members are only required to support the deprecated constructor (and builder).
+  String dbName, tableName, location;
+  Map<String, String> partitionKeyValues;
+
+  private HCatAddPartitionDesc(HCatPartition hcatPartition) {
+    this.hcatPartition = hcatPartition;
+  }
+
+  private HCatAddPartitionDesc(String dbName, String tableName, String location, Map<String, String> partitionKeyValues) {
+    this.hcatPartition = null;
     this.dbName = dbName;
-    this.tableName = tbl;
-    this.location = loc;
-    this.partSpec = spec;
+    this.tableName = tableName;
+    this.location = location;
+    this.partitionKeyValues = partitionKeyValues;
+  }
+
+  HCatPartition getHCatPartition() {
+    return hcatPartition;
+  }
+
+  HCatPartition getHCatPartition(HCatTable hcatTable) throws HCatException {
+    assert hcatPartition == null : "hcatPartition should have been null at this point.";
+    assert dbName.equalsIgnoreCase(hcatTable.getDbName()) : "DB names don't match.";
+    assert tableName.equalsIgnoreCase(hcatTable.getTableName()) : "Table names don't match.";
+    return new HCatPartition(hcatTable, partitionKeyValues, location);
   }
 
   /**
@@ -57,18 +64,19 @@ public class HCatAddPartitionDesc {
    *
    * @return the location
    */
+  @Deprecated // @deprecated in favour of {@link HCatPartition#getLocation()}. To be removed in Hive 0.16.
   public String getLocation() {
-    return this.location;
+    return hcatPartition == null? location : hcatPartition.getLocation();
   }
 
-
   /**
    * Gets the partition spec.
    *
    * @return the partition spec
    */
+  @Deprecated // @deprecated in favour of {@link HCatPartition#getPartitionKeyValMap()}. To be removed in Hive 0.16.
   public Map<String, String> getPartitionSpec() {
-    return this.partSpec;
+    return hcatPartition == null? partitionKeyValues : hcatPartition.getPartitionKeyValMap();
   }
 
   /**
@@ -76,8 +84,9 @@ public class HCatAddPartitionDesc {
    *
    * @return the table name
    */
+  @Deprecated // @deprecated in favour of {@link HCatPartition#getTableName()}. To be removed in Hive 0.16.
   public String getTableName() {
-    return this.tableName;
+    return hcatPartition == null? tableName : hcatPartition.getTableName();
   }
 
   /**
@@ -85,17 +94,14 @@ public class HCatAddPartitionDesc {
    *
    * @return the database name
    */
+  @Deprecated // @deprecated in favour of {@link HCatPartition#getDatabaseName()}. To be removed in Hive 0.16.
   public String getDatabaseName() {
-    return this.dbName;
+    return hcatPartition == null? dbName : hcatPartition.getDatabaseName();
   }
 
   @Override
   public String toString() {
-    return "HCatAddPartitionDesc ["
-      + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
-      + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-      + (location != null ? "location=" + location + ", " : "location=null")
-      + (partSpec != null ? "partSpec=" + partSpec : "partSpec=null") + "]";
+    return "HCatAddPartitionDesc [" + hcatPartition + "]";
   }
 
   /**
@@ -108,61 +114,48 @@ public class HCatAddPartitionDesc {
    * @return the builder
    * @throws HCatException
    */
-  public static Builder create(String dbName, String tableName, String location,
-                 Map<String, String> partSpec) throws HCatException {
+  @Deprecated // @deprecated in favour of {@link HCatAddPartitionDesc#create(HCatPartition)}. To be removed in Hive 0.16.
+  public static Builder create(String dbName,
+                               String tableName,
+                               String location,
+                               Map<String, String> partSpec
+                      ) throws HCatException {
+    LOG.error("Unsupported! HCatAddPartitionDesc requires HCatTable to be specified explicitly.");
     return new Builder(dbName, tableName, location, partSpec);
   }
 
-  Partition toHivePartition(Table hiveTable) throws HCatException {
-    Partition hivePtn = new Partition();
-    hivePtn.setDbName(this.dbName);
-    hivePtn.setTableName(this.tableName);
-
-    List<String> pvals = new ArrayList<String>();
-    for (FieldSchema field : hiveTable.getPartitionKeys()) {
-      String val = partSpec.get(field.getName());
-      if (val == null || val.length() == 0) {
-        throw new HCatException("create partition: Value for key "
-          + field.getName() + " is null or empty");
-      }
-      pvals.add(val);
-    }
-
-    hivePtn.setValues(pvals);
-    StorageDescriptor sd = new StorageDescriptor(hiveTable.getSd());
-    hivePtn.setSd(sd);
-    hivePtn.setParameters(hiveTable.getParameters());
-    if (this.location != null) {
-      hivePtn.getSd().setLocation(this.location);
-    } else {
-      String partName;
-      try {
-        partName = Warehouse.makePartName(
-          hiveTable.getPartitionKeys(), pvals);
-        LOG.info("Setting partition location to :" + partName);
-      } catch (MetaException e) {
-        throw new HCatException("Exception while creating partition name.", e);
-      }
-      Path partPath = new Path(hiveTable.getSd().getLocation(), partName);
-      hivePtn.getSd().setLocation(partPath.toString());
-    }
-    hivePtn.setCreateTime((int) (System.currentTimeMillis() / 1000));
-    hivePtn.setLastAccessTimeIsSet(false);
-    return hivePtn;
+  /**
+   * Constructs a Builder instance, using an HCatPartition object.
+   * @param partition An HCatPartition instance.
+   * @return A Builder object that can build an appropriate HCatAddPartitionDesc.
+   * @throws HCatException
+   */
+  public static Builder create(HCatPartition partition) throws HCatException {
+    return new Builder(partition);
   }
 
+  /**
+   * Builder class for constructing an HCatAddPartition instance.
+   */
   public static class Builder {
 
-    private String tableName;
-    private String location;
-    private Map<String, String> values;
-    private String dbName;
+    private HCatPartition hcatPartition;
+
+    // The following data members are only required to support the deprecated constructor (and builder).
+    String dbName, tableName, location;
+    Map<String, String> partitionSpec;
+
+    private Builder(HCatPartition hcatPartition) {
+      this.hcatPartition = hcatPartition;
+    }
 
-    private Builder(String dbName, String tableName, String location, Map<String, String> values) {
+    @Deprecated // To be removed in Hive 0.16.
+    private Builder(String dbName, String tableName, String location, Map<String, String> partitionSpec) {
+      this.hcatPartition = null;
       this.dbName = dbName;
       this.tableName = tableName;
       this.location = location;
-      this.values = values;
+      this.partitionSpec = partitionSpec;
     }
 
     /**
@@ -172,13 +165,9 @@ public class HCatAddPartitionDesc {
      * @throws HCatException
      */
     public HCatAddPartitionDesc build() throws HCatException {
-      if (this.dbName == null) {
-        this.dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
-      }
-      HCatAddPartitionDesc desc = new HCatAddPartitionDesc(
-        this.dbName, this.tableName, this.location,
-        this.values);
-      return desc;
+      return hcatPartition == null?
+                new HCatAddPartitionDesc(dbName, tableName, location, partitionSpec)
+              : new HCatAddPartitionDesc(hcatPartition);
     }
   }
 

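With this refactoring, an HCatAddPartitionDesc is built from a fully-formed
HCatPartition instead of loose db/table/location strings. A hypothetical usage
sketch: the HCatPartition constructor follows getHCatPartition(HCatTable) in
the diff above, the getTable()/addPartition() calls are assumed from
HCatClient's existing API, and every name and path below is made up for
illustration:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hive.hcatalog.api.*;
    import org.apache.hive.hcatalog.common.HCatException;

    public class AddPartitionSketch {
      // 'client' is assumed to be an already-connected HCatClient.
      static void addOnePartition(HCatClient client) throws HCatException {
        HCatTable table = client.getTable("mydb", "mytable");  // hypothetical db/table
        Map<String, String> spec = new HashMap<String, String>();
        spec.put("dt", "2014-08-24");                          // hypothetical partition column
        String location = "/warehouse/mydb.db/mytable/dt=2014-08-24";  // hypothetical path
        HCatPartition partition = new HCatPartition(table, spec, location);
        client.addPartition(HCatAddPartitionDesc.create(partition).build());
      }
    }
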
Modified: hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java?rev=1620103&r1=1620102&r2=1620103&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java (original)
+++ hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java Sun Aug 24 03:43:48 2014
@@ -155,6 +155,64 @@ public abstract class HCatClient {
     throws HCatException;
 
   /**
+   * Updates the Table's whole schema (including column schema, I/O Formats, SerDe definitions, etc.)
+   * @param dbName The name of the database.
+   * @param tableName The name of the table.
+   * @param newTableDefinition The (new) definition of the table.
+   * @throws HCatException
+   */
+  public abstract void updateTableSchema(String dbName, String tableName, HCatTable newTableDefinition)
+      throws HCatException;
+
+  /**
+   * Serializer for HCatTable.
+   * @param hcatTable The HCatTable to be serialized into string form
+   * @return String representation of the HCatTable.
+   * @throws HCatException, on failure to serialize.
+   */
+  public abstract String serializeTable(HCatTable hcatTable) throws HCatException;
+
+  /**
+   * Deserializer for HCatTable.
+   * @param hcatTableStringRep The String representation of an HCatTable, presumably retrieved from {@link #serializeTable(HCatTable)}
+   * @return HCatTable reconstructed from the string
+   * @throws HCatException, on failure to deserialize.
+   */
+  public abstract HCatTable deserializeTable(String hcatTableStringRep) throws HCatException;
+
+  /**
+   * Serializer for HCatPartition.
+   * @param hcatPartition The HCatPartition instance to be serialized.
+   * @return String representation of the HCatPartition.
+   * @throws HCatException, on failure to serialize.
+   */
+  public abstract String serializePartition(HCatPartition hcatPartition) throws HCatException;
+
+  /**
+   * Serializer for a list of HCatPartition.
+   * @param hcatPartitions The HCatPartitions to be serialized.
+   * @return A list of Strings, each representing an HCatPartition.
+   * @throws HCatException, on failure to serialize.
+   */
+  public abstract List<String> serializePartitions(List<HCatPartition> hcatPartitions) throws HCatException;
+
+  /**
+   * Deserializer for an HCatPartition.
+   * @param hcatPartitionStringRep The String representation of the HCatPartition, presumably retrieved from {@link #serializePartition(HCatPartition)}
+   * @return HCatPartition instance reconstructed from the string.
+   * @throws HCatException, on failure to deserialize.
+   */
+  public abstract HCatPartition deserializePartition(String hcatPartitionStringRep) throws HCatException;
+
+  /**
+   * Deserializer for a list of HCatPartition strings.
+   * @param hcatPartitionStringReps The list of HCatPartition strings to be deserialized.
+   * @return A list of HCatPartition instances, each reconstructed from an entry in the string-list.
+   * @throws HCatException, on failure to deserialize.
+   */
+  public abstract List<HCatPartition> deserializePartitions(List<String> hcatPartitionStringReps) throws HCatException;
+
+  /**
    * Creates the table like an existing table.
    *
    * @param dbName The name of the database.