Posted to common-commits@hadoop.apache.org by zs...@apache.org on 2008/09/20 01:56:35 UTC

svn commit: r697291 [1/31] - in /hadoop/core/trunk: ./ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/ src/contrib/hive/metastore/if/ src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ src/contrib/hive/metastore...

Author: zshao
Date: Fri Sep 19 16:56:30 2008
New Revision: 697291

URL: http://svn.apache.org/viewvc?rev=697291&view=rev
Log:
HADOOP-4205. hive: metastore and ql to use the refactored SerDe library.

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hadoop/core/trunk/src/contrib/hive/metastore/if/hive_metastore.thrift
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftMetaStore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-php/ThriftHiveMetastore.php
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-php/ThriftMetaStore.php
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-php/hive_metastore_constants.php
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-php/hive_metastore_types.php
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-py/hive_metastore/ThriftMetaStore.py
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-py/hive_metastore/constants.py
    hadoop/core/trunk/src/contrib/hive/metastore/src/gen-py/hive_metastore/ttypes.py
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/FileStore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreServer.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/model/org/apache/hadoop/hive/metastore/model/MSerDeInfo.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/model/package.jdo
    hadoop/core/trunk/src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/MetaStoreTestBase.java
    hadoop/core/trunk/src/contrib/hive/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CompositeHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/LabeledCompositeHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/NullHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/PrimitiveHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TableHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveObjectComparator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveObjectSerializer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NaiiveJSONSerializer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NaiiveSerializer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagHiveObjectComparator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagWritableComparableHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagWritableHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/WritableComparableHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/WritableHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/groupby6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby8.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input12.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input13.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input14.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input15.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input8.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input9.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_part1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_testsequencefile.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join8.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/subq.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/udf1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/union.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/cast1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input8.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input9.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/union.q.xml
    hadoop/core/trunk/src/contrib/hive/serde/if/serde.thrift
    hadoop/core/trunk/src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java
    hadoop/core/trunk/src/contrib/hive/serde/src/gen-php/serde_constants.php
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardListObjectInspector.java
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
    hadoop/core/trunk/src/contrib/hive/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardPrimitiveObjectInspector.java
    hadoop/core/trunk/src/contrib/hive/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri Sep 19 16:56:30 2008
@@ -210,6 +210,9 @@
 
   IMPROVEMENTS
 
+    HADOOP-4205. hive: metastore and ql to use the refactored SerDe library.
+    (zshao)
+
     HADOOP-4106. libhdfs: add time, permission and user attribute support (part 2).
     (Pete Wyckoff through zshao)
 

Modified: hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Fri Sep 19 16:56:30 2008
@@ -82,30 +82,27 @@
         e.printStackTrace();
       }
 
-    } else {
+    } 
+    else {
       ret = qp.run(cmd);
       Vector<Vector<String>> res = new Vector<Vector<String>>();
-      while (qp.getResults(res))
-      {
-        SessionState ss = SessionState.get();
-        OutputStream out = ss.out;
-        try 
-        {
+      while (qp.getResults(res)) {
+        SessionState ss  = SessionState.get();
+        PrintStream out = ss.out;
 
-          for (Vector<String> row:res)
+        for (Vector<String> row:res)
+        {
+          boolean firstCol = true;
+          for (String col:row)
           {
-            for (String col:row)
-            {
-              out.write(col == null ? Utilities.nullStringOutput.getBytes() : col.getBytes());
+            if (!firstCol)
               out.write(Utilities.tabCode);
-            } 
-            out.write(Utilities.newLineCode);
-          }
-          res.clear();
-
-        } catch (IOException e) {
-          e.printStackTrace();
+            out.print(col == null ? Utilities.nullStringOutput : col);
+            firstCol = false;
+          } 
+          out.write(Utilities.newLineCode);
         }
+        res.clear();
       }
     }
     return ret;
@@ -151,16 +148,20 @@
     SessionState.initHiveLog4j();
 
     CliSessionState ss = new CliSessionState (new HiveConf(SessionState.class));
+    ss.in = System.in;
+    try {
+      ss.out = new PrintStream(System.out, true, "UTF-8");
+      ss.err = new PrintStream(System.err, true, "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      System.exit(3);
+    }
+
     SessionState.start(ss);
 
     if(! oproc.process_stage2(ss)) {
       System.exit(2);
     }
 
-    ss.in = System.in;
-    ss.out = System.out;
-    ss.err = System.err;
-
     sp = new SetProcessor();
     qp = new Driver();
 

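[Editor's note] The CliDriver hunk above does two things: it routes query results through an auto-flushing UTF-8 PrintStream instead of writing raw platform-encoded bytes (dropping the now-unneeded IOException handling), and it emits the tab separator before every column after the first, so rows no longer end with a stray tab. A minimal sketch of the same pattern, assuming a hypothetical wrapper class and sample row ("NULL" stands in for Utilities.nullStringOutput; PrintStream and its UTF-8 constructor are standard java.io):

import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class Utf8Console {
  public static void main(String[] args) {
    PrintStream out;
    try {
      // Auto-flush plus explicit UTF-8: multi-byte column values print intact.
      out = new PrintStream(System.out, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(3); // the patch exits with status 3 in the same situation
      return;
    }
    String[] row = { "key", null, "value" };
    boolean firstCol = true;
    for (String col : row) {
      if (!firstCol) {
        out.write('\t'); // separator goes before columns 2..n, never after the last
      }
      out.print(col == null ? "NULL" : col);
      firstCol = false;
    }
    out.write('\n');
  }
}
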
Modified: hadoop/core/trunk/src/contrib/hive/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/if/hive_metastore.thrift?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/if/hive_metastore.thrift (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/if/hive_metastore.thrift Fri Sep 19 16:56:30 2008
@@ -1,172 +1,107 @@
 #!/usr/local/bin/thrift -java
 #
 # Thrift Service that the MetaStore is built on
-# Pete Wyckoff (pwyckoff@facebook.com)
 #
 
-/**
- * The available types in Thrift:
- *
- *  bool        Boolean, one byte
- *  byte        Signed byte
- *  i16         Signed 16-bit integer
- *  i32         Signed 32-bit integer
- *  i64         Signed 64-bit integer
- *  double      64-bit floating point value
- *  string      String
- *  map<t1,t2>  Map from one type to another
- *  list<t1>    Ordered list of one type
- *  set<t1>     Set of unique elements of one type
- *
- */
-
 include "thrift/fb303/if/fb303.thrift"
 
-
 namespace java org.apache.hadoop.hive.metastore.api
-namespace php metastore
-
-// below is terrible - tight, tight integration like you know who... bugbug
-// need to add a level of indirection to name these things..
-const string META_SERDE = "com.facebook.serde.simple_meta.MetadataTypedColumnsetSerDe"
-
-const string META_TABLE_COLUMNS  = "columns",
-const string BUCKET_FIELD_NAME   = "bucket_field_name",
-const string BUCKET_COUNT        = "bucket_count",
-const string FIELD_TO_DIMENSION  = "field_to_dimension",
-const string META_TABLE_NAME     = "name",
-const string META_TABLE_DB     = "db",
-const string META_TABLE_LOCATION = "location",
-const string META_TABLE_SERDE    = "serde",
-const string SERIALIZATION_FORMAT = "serialization.format",
-const string SERIALIZATION_CLASS = "serialization.class",
-const string SERIALIZATION_LIB = "serialization.lib",
-const string META_TABLE_PARTITION_COLUMNS = "partition_columns",
-const string FILE_INPUT_FORMAT = "file.inputformat",
-const string FILE_OUTPUT_FORMAT = "file.outputformat",
-
-
-exception MetaException {
-  string message
-}
-
-exception UnknownTableException {
-  string message
-}
-
-exception UnknownDBException {
-  string message
-}
-
-// new metastore api, below will be merged with above
-
-const string KEY_COMMENTS = "key_comments";
-const string VERSION_0_1 = "0.1";
-
-typedef string PrimitiveType
-typedef string CollectionType
-
-const string TINYINT_TYPE_NAME = "tinyint";
-const string INT_TYPE_NAME = "int";
-const string BIGINT_TYPE_NAME = "bigint";
-const string FLOAT_TYPE_NAME = "float";
-const string DOUBLE_TYPE_NAME = "double"; 
-const string STRING_TYPE_NAME = "string";
-const string DATE_TYPE_NAME = "date";
-const string DATETIME_TYPE_NAME = "datetime";
-const string TIMESTAMP_TYPE_NAME = "timestamp";
-
-const string LIST_TYPE_NAME = "list";
-const string MAP_TYPE_NAME = "map";
-
-const set<string> PrimitiveTypes = [ TINYINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME ],
-const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
+php_namespace metastore
 
 struct Version {
-  string version,
-  string comments
+  1: string version,
+  2: string comments
 }
 
 struct FieldSchema {
-  string name, // name of the field
-  string type, // type of the field. primitive types defined above, specify list<TYPE_NAME>, map<TYPE_NAME, TYPE_NAME> for lists & maps 
-  string comment
+  1: string name, // name of the field
+  2: string type, // type of the field. primitive types defined above, specify list<TYPE_NAME>, map<TYPE_NAME, TYPE_NAME> for lists & maps 
+  3: string comment
 }
 
 struct Type {
-  string name, // one of the types in PrimitiveTypes or CollectionTypes or User defined types
-  optional string type1, // object type if the name is 'list' (LIST_TYPE), key type if the name is 'map' (MAP_TYPE)
-  optional string type2, // val type if the name is 'map' (MAP_TYPE)
-  optional list<FieldSchema> fields //if the name is one of the user defined types
+  1: string          name,             // one of the types in PrimitiveTypes or CollectionTypes or User defined types
+  2: optional string type1,            // object type if the name is 'list' (LIST_TYPE), key type if the name is 'map' (MAP_TYPE)
+  3: optional string type2,            // val type if the name is 'map' (MAP_TYPE)
+  4: optional list<FieldSchema> fields // if the name is one of the user defined types
 }
 
-// groups a set of tables
+// namespace for tables
 struct Database {
-  string name,
-  string locationUri,
+  1: string name,
+  2: string description,
 }
 
+// This object holds the information needed by SerDes
 struct SerDeInfo {
-  string name;
-  string serializationFormat;
-  string serializationClass;
-  string serializationLib;
-  string fieldDelim;
-  string collectionItemDelim;
-  string mapKeyDelim;
-  string lineDelim;
-  map<string, string> parameters
+  1: string name,                   // name of the serde, table name by default
+  2: string serializationLib,       // usually the class that implements the extractor & loader
+  3: map<string, string> parameters // initialization parameters
 }
 
+// sort order of a column (column name along with asc(1)/desc(0))
 struct Order {
-  string col,
-  i32 order
+  1: string col,  // sort column name
+  2: i32    order // asc(1) or desc(0)
 }
 
+// this object holds all the information about physical storage of the data belonging to a table
 struct StorageDescriptor {
-  list<FieldSchema> cols, // required (refer to types defined above)
-  string location, // defaults to <warehouse loc>/<db loc>/tablename
-  string inputFormat; // SequenceFileInputFormat (binary) or TextInputFormat`  or custom format
-  string outputFormat; // SequenceFileOutputFormat (binary) or IgnoreKeyTextOutputFormat or custom format
-  bool isCompressed; // compressed or not
-  i32 numBuckets = 32, // this must be specified if there are any dimension columns
-  SerDeInfo serdeInfo; // serialization and deserialization information
-  list<string> bucketCols, // reducer grouping columns and clustering columns and bucketing columns`
-  list<Order> sortCols, // sort order of the data in each bucket
-  map<string, string> parameters // any user supplied key value hash
+  1: list<FieldSchema> cols,  // required (refer to types defined above)
+  2: string location,         // defaults to <warehouse loc>/<db loc>/tablename
+  3: string inputFormat,      // SequenceFileInputFormat (binary) or TextInputFormat`  or custom format
+  4: string outputFormat,     // SequenceFileOutputFormat (binary) or IgnoreKeyTextOutputFormat or custom format
+  5: bool   compressed,       // compressed or not
+  6: i32    numBuckets,       // this must be specified if there are any dimension columns
+  7: SerDeInfo    serdeInfo,  // serialization and deserialization information
+  8: list<string> bucketCols, // reducer grouping columns and clustering columns and bucketing columns`
+  9: list<Order>  sortCols,   // sort order of the data in each bucket
+  10: map<string, string> parameters // any user supplied key value hash
 }
 
+// table information
 struct Table {
-  string tableName, 
-  string database,
-  string owner,
-  i32 createTime,
-  i32 lastAccessTime,
-  i32 retention,
-  StorageDescriptor sd,
-  list<FieldSchema> partitionKeys, // optional
-  map<string, string> parameters // to store comments or any other user level parameters
+  1: string tableName,                // name of the table
+  2: string dbName,                   // database name ('default')
+  3: string owner,                    // owner of this table
+  4: i32    createTime,               // creation time of the table
+  5: i32    lastAccessTime,           // last access time (usually this will be filled from HDFS and shouldn't be relied on)
+  6: i32    retention,                // retention time
+  7: StorageDescriptor sd,            // storage descriptor of the table
+  8: list<FieldSchema> partitionKeys, // partition keys of the table. only primitive types are supported
+  9: map<string, string> parameters   // to store comments or any other user level parameters
 }
 
 struct Partition {
-  // keys are inherited from table. this should be okay because partition keys can't be changed over time
-  list<string> values // string value is converted to appropriate partition key type
-  string database,
-  string tableName,
-  i32 createTime,
-  i32 lastAccessTime,
-  StorageDescriptor sd,
-  map<string, string> parameters
+  1: list<string> values // string value is converted to appropriate partition key type
+  2: string       dbName,
+  3: string       tableName,
+  4: i32          createTime,
+  5: i32          lastAccessTime,
+  6: StorageDescriptor   sd,
+  7: map<string, string> parameters
 }
 
 // index on a hive table is also another table whose columns are the subset of the base table columns along with the offset
 // this will automatically generate table (table_name_index_name)
 struct Index {
-  string indexName, // unique with in the whole database namespace
-  i32 indexType, // reserved
-  string tableName,
-  string databaseName,
-  list<string> colNames, // for now columns will be sorted in the ascending order
+  1: string       indexName, // unique with in the whole database namespace
+  2: i32          indexType, // reserved
+  3: string       tableName,
+  4: string       dbName,
+  5: list<string> colNames,  // for now columns will be sorted in the ascending order
+}
+
+exception MetaException {
+  string message
+}
+
+exception UnknownTableException {
+  string message
+}
+
+exception UnknownDBException {
+  string message
 }
 
 exception AlreadyExistsException {
@@ -194,105 +129,123 @@
 }
 
 /**
-* This interface is NOT live yet.
+* This interface is live.
 */
 service ThriftHiveMetastore extends fb303.FacebookService
 {
-  // Database
-  bool create_database(1:string name, 2:string location_uri) throws(1:AlreadyExistsException o1, 2:MetaException o2)
+  bool create_database(1:string name, 2:string description) 
+                                       throws(1:AlreadyExistsException o1, 2:MetaException o2)
   Database get_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  bool drop_database(1:string name) throws(2:MetaException o2)
-  list<string> get_databases() throws(1:MetaException o1)
+  bool drop_database(1:string name)    throws(2:MetaException o2)
+  list<string> get_databases()         throws(1:MetaException o1)
 
-  // Type
   // returns the type with given name (make seperate calls for the dependent types if needed)
-  Type get_type(1:string name) throws(1:MetaException o2)
+  Type get_type(1:string name)  throws(1:MetaException o2)
   bool create_type(1:Type type) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
   bool drop_type(1:string type) throws(1:MetaException o2)
-  map<string, Type> get_type_all(1:string name) throws(1:MetaException o2)
+  map<string, Type> get_type_all(1:string name) 
+                                throws(1:MetaException o2)
 
-  list<FieldSchema> get_fields(string db_name, string table_name) throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  list<FieldSchema> get_fields(string db_name, string table_name) throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
-  // Tables
   // create a Hive table. Following fields must be set
-  // Table.tableName
-  // Table.database (only 'default' for now until Hive QL supports databases)
-  // Table.owner (not needed, but good to have for tracking purposes)
-  // Table.sd.cols (list of field schemas)
-  // Table.sd.inputFormat ( SequenceFileInputFormat (binary like falcon tables or u_full) or TextInputFormat)
-  // Table.sd.outputFormat ( SequenceFileInputFormat (binary) or TextInputFormat)
-  // Table.sd.serdeInfo.serializationLib (SerDe class name such as org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe
-  void create_table(1:Table tbl) throws(1:AlreadyExistsException ouch1, 2:InvalidObjectException ouch2, 3:MetaException ouch3, 4:NoSuchObjectException o4)
+  // tableName
+  // database        (only 'default' for now until Hive QL supports databases)
+  // owner           (not needed, but good to have for tracking purposes)
+  // sd.cols         (list of field schemas)
+  // sd.inputFormat  (SequenceFileInputFormat (binary like falcon tables or u_full) or TextInputFormat)
+  // sd.outputFormat (SequenceFileInputFormat (binary) or TextInputFormat)
+  // sd.serdeInfo.serializationLib (SerDe class name eg org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe
+  void create_table(1:Table tbl) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3, 4:NoSuchObjectException o4)
   // drops the table and all the partitions associated with it if the table has partitions
   // delete data (including partitions) if deleteData is set to true
-  void drop_table(1:string dbname, 2:string name, 3:bool deleteData) throws(1:NoSuchObjectException o1, 2:MetaException ouch3)
-  list<string> get_tables(string db_name, string pattern)  throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3)
-  Table get_table(1:string dbname, 2:string tbl_name) throws (1:MetaException o1, 2:NoSuchObjectException o2)
-  bool set_table_parameters(1:string dbname, 2:string tbl_name, 3:map<string, string> params) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  // this applies to only future partitions not for existing partitions
-  void alter_table(1:string dbname, 2:string tbl_name, 3:Table new_tbl) throws (1:InvalidOperationException o1, 2:MetaException o2)
-  void truncate_table(1:string db_name, 2:string table_name, 3:string partition)  throws (1:MetaException ouch1, 2:UnknownTableException ouch2, 3:UnknownDBException ouch3),
-  list<string> cat(1:string db_name, 2:string table_name, 3:string partition, i32 high) throws  (MetaException ouch1, UnknownDBException ouch2, UnknownTableException ouch3),
+  void drop_table(1:string dbname, 2:string name, 3:bool deleteData) 
+                       throws(1:NoSuchObjectException o1, 2:MetaException o3)
+  list<string> get_tables(string db_name, string pattern)
+                       throws (MetaException o1, UnknownTableException o2, UnknownDBException o3)
+  Table get_table(1:string dbname, 2:string tbl_name) 
+                       throws (1:MetaException o1, 2:NoSuchObjectException o2)
+  // alter table applies to only future partitions not for existing partitions
+  void alter_table(1:string dbname, 2:string tbl_name, 3:Table new_tbl) 
+                       throws (1:InvalidOperationException o1, 2:MetaException o2)
 
-  // Partition
   // the following applies to only tables that have partitions
-  Partition add_partition(1:Partition new_part) throws(1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
-  Partition append_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals) throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
-  bool drop_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals, 4:bool deleteData) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  Partition get_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals) throws(1:MetaException o1)
-  // returns all the partitions for this table in reverse chronological order. if max parts is given then it will return only that many
-  list<Partition> get_partitions(1:string db_name, 2:string tbl_name, 3:i16 max_parts=-1) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  // this can be used if an old parition has to be recreated
-  bool set_partition_parameters(1:string db_name, 2:string tbl_name, 3:string pname, 4:map<string, string> params) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  // changes the given partitions to the new storage descriptor. all partitions should belong to the same table
-  bool alter_partitions(1:StorageDescriptor sd, 2:list<string> parts) throws(1:InvalidOperationException o1, 2:MetaException o2)
+  Partition add_partition(1:Partition new_part) 
+                       throws(1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
+  Partition append_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals) 
+                       throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
+  bool drop_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals, 4:bool deleteData) 
+                       throws(1:NoSuchObjectException o1, 2:MetaException o2)
+  Partition get_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals) 
+                       throws(1:MetaException o1)
+  // returns all the partitions for this table in reverse chronological order. 
+  // if max parts is given then it will return only that many
+  list<Partition> get_partitions(1:string db_name, 2:string tbl_name, 3:i16 max_parts=-1) 
+                       throws(1:NoSuchObjectException o1, 2:MetaException o2)
+  list<string> get_partition_names(1:string db_name, 2:string tbl_name, 3:i16 max_parts=-1) 
+                       throws(1:MetaException o2)
+  // set new storage descriptor. all partitions should belong to the same table
+  bool alter_partitions(1:StorageDescriptor sd, 2:list<string> parts) 
+                       throws(1:InvalidOperationException o1, 2:MetaException o2)
 
-  // Index
   // index related metadata (may not be in the open source)
   bool create_index(1:Index index_def) throws(1:IndexAlreadyExistsException o1, 2:MetaException o2)
 }
 
 
 /**
-* This interface is the live.
+* This interface is deprecated.
 */
 service ThriftMetaStore extends fb303.FacebookService
 {
   // retrieve a printable representation of the fields in a table (logfile, type) or table subtype
-  list<FieldSchema> get_fields(string db_name, string table_name) throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  list<FieldSchema> get_fields(string db_name, string table_name) throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // get all the tables (logfiles, types) in the metastore - no partitioning like different dbs yet
-  list<string> get_tables(string db_name, string pattern)  throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  list<string> get_tables(string db_name, string pattern)  throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // retrieve the opaque schema representation of this table (logfile, type) which contains enough
   // information for the caller to instantiate some kind of object that will let it examine the type.
   // That object might be a thrift, jute, or SerDe.
-  map<string,string> get_schema(string table_name) throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  map<string,string> get_schema(string table_name) throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // add some structure to the table or change its structure
-  void alter_table(string db_name, string table_name, map<string,string> schema) throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  void alter_table(string db_name, string table_name, map<string,string> schema) throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // create_table == create_table4 (table_name, SIMPLE_META_SERDE, '\t', "",  dict [ META_COLUMNS => columns]
   // bugbug do above transformation and deprecate this API
-  void create_table(string db_name, string table_name, map<string,string> schema) throws (MetaException ouch1, UnknownDBException ouch2),
+  void create_table(string db_name, string table_name, map<string,string> schema) throws (MetaException o1, UnknownDBException o2),
 
   // drop a table (i.e., remove it from the metastore) - for now allow metastore to do the delete (so python shell can do drops)
-  void drop_table(string db_name, string table_name) throws  (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  void drop_table(string db_name, string table_name) throws  (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // truncate a table - i.e., delete its data, but keep the hdfs directory and the schema
-  void truncate_table(string db_name, string table_name, string partition)  throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  void truncate_table(string db_name, string table_name, string partition)  throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // generally does the table exist
-  bool table_exists(string db_name, string table_name) throws (MetaException ouch1, UnknownDBException ouch2),
+  bool table_exists(string db_name, string table_name) throws (MetaException o1, UnknownDBException o2),
 
   // create a table with named columns
-  list<string> get_partitions(string db_name, string table_name) throws (MetaException ouch1, UnknownTableException ouch2, UnknownDBException ouch3),
+  list<string> get_partitions(string db_name, string table_name) throws (MetaException o1, UnknownTableException o2, UnknownDBException o3),
 
   // enumerate all the databases in this store
-  list<string> get_dbs() throws  (MetaException ouch),
+  list<string> get_dbs() throws  (MetaException o),
 
   // /bin/cat the table in human readable format
-  list<string> cat(string db_name, string table_name,string partition, i32 high) throws  (MetaException ouch1, UnknownDBException ouch2, UnknownTableException ouch3),
+  list<string> cat(string db_name, string table_name,string partition, i32 high) throws  (MetaException o1, UnknownDBException o2, UnknownTableException o3),
 }
 
+// these should be needed only for backward compatibility with filestore
+const string META_TABLE_COLUMNS   = "columns",
+const string BUCKET_FIELD_NAME    = "bucket_field_name",
+const string BUCKET_COUNT         = "bucket_count",
+const string FIELD_TO_DIMENSION   = "field_to_dimension",
+const string META_TABLE_NAME      = "name",
+const string META_TABLE_DB        = "db",
+const string META_TABLE_LOCATION  = "location",
+const string META_TABLE_SERDE     = "serde",
+const string META_TABLE_PARTITION_COLUMNS = "partition_columns",
+const string FILE_INPUT_FORMAT    = "file.inputformat",
+const string FILE_OUTPUT_FORMAT   = "file.outputformat",
+
 

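[Editor's note] The most consequential change in this IDL rewrite is that every struct field now carries an explicit positive ID (1:, 2:, ...). With implicit IDs the Thrift compiler assigns negative numbers counting down from -1, which is why the generated beans below switch from case -1/-2 to case 1/2; pinning the IDs stabilizes the wire format so fields can later be added or reordered safely. The service also changes create_database(name, location_uri) to create_database(name, description). A hedged sketch of calling the new API through the generated client — host, port, and the class name are illustrative, and contemporary Thrift releases shipped the runtime under com.facebook.thrift rather than org.apache.thrift:

import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;

public class MetastoreSmokeTest {
  public static void main(String[] args) throws Exception {
    TSocket transport = new TSocket("localhost", 9083); // illustrative endpoint
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
    // Database now carries a free-form description instead of a locationUri.
    client.create_database("analytics", "tables for the analytics team");
    Database db = client.get_database("analytics");
    System.out.println(db); // generated toString(): Database(name:..., description:...)
    transport.close();
  }
}
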
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java Fri Sep 19 16:56:30 2008
@@ -15,8 +15,6 @@
 
 public class Constants {
 
-  public static final String META_SERDE = "com.facebook.serde.simple_meta.MetadataTypedColumnsetSerDe";
-
   public static final String META_TABLE_COLUMNS = "columns";
 
   public static final String BUCKET_FIELD_NAME = "bucket_field_name";
@@ -33,61 +31,10 @@
 
   public static final String META_TABLE_SERDE = "serde";
 
-  public static final String SERIALIZATION_FORMAT = "serialization.format";
-
-  public static final String SERIALIZATION_CLASS = "serialization.class";
-
-  public static final String SERIALIZATION_LIB = "serialization.lib";
-
   public static final String META_TABLE_PARTITION_COLUMNS = "partition_columns";
 
   public static final String FILE_INPUT_FORMAT = "file.inputformat";
 
   public static final String FILE_OUTPUT_FORMAT = "file.outputformat";
 
-  public static final String KEY_COMMENTS = "key_comments";
-
-  public static final String VERSION_0_1 = "0.1";
-
-  public static final String TINYINT_TYPE_NAME = "tinyint";
-
-  public static final String INT_TYPE_NAME = "int";
-
-  public static final String BIGINT_TYPE_NAME = "bigint";
-
-  public static final String FLOAT_TYPE_NAME = "float";
-
-  public static final String DOUBLE_TYPE_NAME = "double";
-
-  public static final String STRING_TYPE_NAME = "string";
-
-  public static final String DATE_TYPE_NAME = "date";
-
-  public static final String DATETIME_TYPE_NAME = "datetime";
-
-  public static final String TIMESTAMP_TYPE_NAME = "timestamp";
-
-  public static final String LIST_TYPE_NAME = "list";
-
-  public static final String MAP_TYPE_NAME = "map";
-
-  public static final Set<String> PrimitiveTypes = new HashSet<String>();
-  static {
-    PrimitiveTypes.add("tinyint");
-    PrimitiveTypes.add("int");
-    PrimitiveTypes.add("bigint");
-    PrimitiveTypes.add("float");
-    PrimitiveTypes.add("double");
-    PrimitiveTypes.add("string");
-    PrimitiveTypes.add("date");
-    PrimitiveTypes.add("datetime");
-    PrimitiveTypes.add("timestamp");
-  }
-
-  public static final Set<String> CollectionTypes = new HashSet<String>();
-  static {
-    CollectionTypes.add("list");
-    CollectionTypes.add("map");
-  }
-
 }

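[Editor's note] With the serialization.format/class/lib keys and the type-name constants deleted here, the metastore's generated Constants class keeps only storage-level keys; judging by the modified-file list, the serialization keys move with the refactored serde component (serde/if/serde.thrift above). A hedged sketch of the keys that remain, used as table parameters (the map contents are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.metastore.api.Constants;

public class TableParams {
  public static Map<String, String> defaults() {
    Map<String, String> p = new HashMap<String, String>();
    p.put(Constants.META_TABLE_COLUMNS, "key,value");  // "columns"
    p.put(Constants.BUCKET_COUNT, "32");               // "bucket_count"
    p.put(Constants.FILE_INPUT_FORMAT,
          "org.apache.hadoop.mapred.TextInputFormat"); // "file.inputformat"
    return p;
  }
}
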
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java Fri Sep 19 16:56:30 2008
@@ -18,12 +18,12 @@
 
 public class Database implements TBase, java.io.Serializable {
 private String name;
-private String locationUri;
+private String description;
 
 public final Isset __isset = new Isset();
 public static final class Isset implements java.io.Serializable {
 public boolean name = false;
-public boolean locationUri = false;
+public boolean description = false;
 }
 
 public Database() {
@@ -31,13 +31,13 @@
 
 public Database(
 String name,
-String locationUri)
+String description)
 {
 this();
 this.name = name;
 this.__isset.name = true;
-this.locationUri = locationUri;
-this.__isset.locationUri = true;
+this.description = description;
+this.__isset.description = true;
 }
 
 public String getName() {
@@ -53,17 +53,17 @@
 this.__isset.name = false;
 }
 
-public String getLocationUri() {
-return this.locationUri;
+public String getDescription() {
+return this.description;
 }
 
-public void setLocationUri(String locationUri) {
-this.locationUri = locationUri;
-this.__isset.locationUri = true;
+public void setDescription(String description) {
+this.description = description;
+this.__isset.description = true;
 }
 
-public void unsetLocationUri() {
-this.__isset.locationUri = false;
+public void unsetDescription() {
+this.__isset.description = false;
 }
 
 public boolean equals(Object that) {
@@ -87,12 +87,12 @@
   return false;
 }
 
-boolean this_present_locationUri = true && (this.locationUri != null);
-boolean that_present_locationUri = true && (that.locationUri != null);
-if (this_present_locationUri || that_present_locationUri) {
-if (!(this_present_locationUri && that_present_locationUri))
+boolean this_present_description = true && (this.description != null);
+boolean that_present_description = true && (that.description != null);
+if (this_present_description || that_present_description) {
+if (!(this_present_description && that_present_description))
   return false;
-if (!this.locationUri.equals(that.locationUri))
+if (!this.description.equals(that.description))
   return false;
 }
 
@@ -114,7 +114,7 @@
 }
 switch (field.id)
 {
-  case -1:
+  case 1:
     if (field.type == TType.STRING) {
       this.name = iprot.readString();
       this.__isset.name = true;
@@ -122,10 +122,10 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -2:
+  case 2:
     if (field.type == TType.STRING) {
-      this.locationUri = iprot.readString();
-      this.__isset.locationUri = true;
+      this.description = iprot.readString();
+      this.__isset.description = true;
     } else { 
       TProtocolUtil.skip(iprot, field.type);
     }
@@ -146,17 +146,17 @@
 if (this.name != null) {
 field.name = "name";
 field.type = TType.STRING;
-field.id = -1;
+field.id = 1;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.name);
 oprot.writeFieldEnd();
 }
-if (this.locationUri != null) {
-field.name = "locationUri";
+if (this.description != null) {
+field.name = "description";
 field.type = TType.STRING;
-field.id = -2;
+field.id = 2;
 oprot.writeFieldBegin(field);
-oprot.writeString(this.locationUri);
+oprot.writeString(this.description);
 oprot.writeFieldEnd();
 }
 oprot.writeFieldStop();
@@ -167,8 +167,8 @@
 StringBuilder sb = new StringBuilder("Database(");
 sb.append("name:");
 sb.append(this.name);
-sb.append(",locationUri:");
-sb.append(this.locationUri);
+sb.append(",description:");
+sb.append(this.description);
 sb.append(")");
 return sb.toString();
 }

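[Editor's note] Beyond the locationUri-to-description rename, the regenerated reader and writer now dispatch on positive field IDs (case 1/case 2, field.id = 1/2) matching the explicit IDs in the IDL, so records written by the old negative-ID code are no longer field-compatible. A minimal sketch of the regenerated bean, using only the constructor, accessors, and __isset flags visible in the diff (the values are illustrative):

import org.apache.hadoop.hive.metastore.api.Database;

public class DatabaseBeanDemo {
  public static void main(String[] args) {
    Database db = new Database("default", "Default Hive database");
    System.out.println(db.getDescription());    // prints: Default Hive database
    db.setDescription("scratch tables");
    db.unsetDescription();                      // clears only the __isset flag
    System.out.println(db.__isset.description); // prints: false
  }
}
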
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java Fri Sep 19 16:56:30 2008
@@ -141,7 +141,7 @@
       }
       switch (field.id)
       {
-        case -1:
+        case 1:
           if (field.type == TType.STRING) {
             this.name = iprot.readString();
             this.__isset.name = true;
@@ -149,7 +149,7 @@
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
-        case -2:
+        case 2:
           if (field.type == TType.STRING) {
             this.type = iprot.readString();
             this.__isset.type = true;
@@ -157,7 +157,7 @@
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
-        case -3:
+        case 3:
           if (field.type == TType.STRING) {
             this.comment = iprot.readString();
             this.__isset.comment = true;
@@ -181,7 +181,7 @@
     if (this.name != null) {
       field.name = "name";
       field.type = TType.STRING;
-      field.id = -1;
+      field.id = 1;
       oprot.writeFieldBegin(field);
       oprot.writeString(this.name);
       oprot.writeFieldEnd();
@@ -189,7 +189,7 @@
     if (this.type != null) {
       field.name = "type";
       field.type = TType.STRING;
-      field.id = -2;
+      field.id = 2;
       oprot.writeFieldBegin(field);
       oprot.writeString(this.type);
       oprot.writeFieldEnd();
@@ -197,7 +197,7 @@
     if (this.comment != null) {
       field.name = "comment";
       field.type = TType.STRING;
-      field.id = -3;
+      field.id = 3;
       oprot.writeFieldBegin(field);
       oprot.writeString(this.comment);
       oprot.writeFieldEnd();

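FieldSchema changes nothing but the field ids (-1..-3 become 1..3), which makes the compatibility consequence easy to isolate. A round-trip sketch, assuming the com.facebook.thrift transport and protocol classes this generated code already depends on (TIOStreamTransport, TBinaryProtocol) and the usual generated setters:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import com.facebook.thrift.protocol.TBinaryProtocol;
    import com.facebook.thrift.transport.TIOStreamTransport;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;

    public class FieldSchemaRoundTrip {
      public static void main(String[] args) throws Exception {
        FieldSchema fs = new FieldSchema();
        fs.setName("viewtime");
        fs.setType("int");
        fs.setComment("unix epoch seconds");

        // write() now tags each field with ids 1..3
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        fs.write(new TBinaryProtocol(new TIOStreamTransport(bos)));

        // read() binds ids 1..3; any other id falls through to TProtocolUtil.skip
        FieldSchema back = new FieldSchema();
        back.read(new TBinaryProtocol(new TIOStreamTransport(
            new ByteArrayInputStream(bos.toByteArray()))));
        System.out.println(back);  // all three fields bound, since writer and reader agree on ids
      }
    }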
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java Fri Sep 19 16:56:30 2008
@@ -20,7 +20,7 @@
 private String indexName;
 private int indexType;
 private String tableName;
-private String databaseName;
+private String dbName;
 private List<String> colNames;
 
 public final Isset __isset = new Isset();
@@ -28,7 +28,7 @@
 public boolean indexName = false;
 public boolean indexType = false;
 public boolean tableName = false;
-public boolean databaseName = false;
+public boolean dbName = false;
 public boolean colNames = false;
 }
 
@@ -39,7 +39,7 @@
 String indexName,
 int indexType,
 String tableName,
-String databaseName,
+String dbName,
 List<String> colNames)
 {
 this();
@@ -49,8 +49,8 @@
 this.__isset.indexType = true;
 this.tableName = tableName;
 this.__isset.tableName = true;
-this.databaseName = databaseName;
-this.__isset.databaseName = true;
+this.dbName = dbName;
+this.__isset.dbName = true;
 this.colNames = colNames;
 this.__isset.colNames = true;
 }
@@ -94,17 +94,17 @@
 this.__isset.tableName = false;
 }
 
-public String getDatabaseName() {
-return this.databaseName;
+public String getDbName() {
+return this.dbName;
 }
 
-public void setDatabaseName(String databaseName) {
-this.databaseName = databaseName;
-this.__isset.databaseName = true;
+public void setDbName(String dbName) {
+this.dbName = dbName;
+this.__isset.dbName = true;
 }
 
-public void unsetDatabaseName() {
-this.__isset.databaseName = false;
+public void unsetDbName() {
+this.__isset.dbName = false;
 }
 
 public int getColNamesSize() {
@@ -176,12 +176,12 @@
   return false;
 }
 
-boolean this_present_databaseName = true && (this.databaseName != null);
-boolean that_present_databaseName = true && (that.databaseName != null);
-if (this_present_databaseName || that_present_databaseName) {
-if (!(this_present_databaseName && that_present_databaseName))
+boolean this_present_dbName = true && (this.dbName != null);
+boolean that_present_dbName = true && (that.dbName != null);
+if (this_present_dbName || that_present_dbName) {
+if (!(this_present_dbName && that_present_dbName))
   return false;
-if (!this.databaseName.equals(that.databaseName))
+if (!this.dbName.equals(that.dbName))
   return false;
 }
 
@@ -212,7 +212,7 @@
 }
 switch (field.id)
 {
-  case -1:
+  case 1:
     if (field.type == TType.STRING) {
       this.indexName = iprot.readString();
       this.__isset.indexName = true;
@@ -220,7 +220,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -2:
+  case 2:
     if (field.type == TType.I32) {
       this.indexType = iprot.readI32();
       this.__isset.indexType = true;
@@ -228,7 +228,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -3:
+  case 3:
     if (field.type == TType.STRING) {
       this.tableName = iprot.readString();
       this.__isset.tableName = true;
@@ -236,15 +236,15 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -4:
+  case 4:
     if (field.type == TType.STRING) {
-      this.databaseName = iprot.readString();
-      this.__isset.databaseName = true;
+      this.dbName = iprot.readString();
+      this.__isset.dbName = true;
     } else { 
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -5:
+  case 5:
     if (field.type == TType.LIST) {
       {
         TList _list44 = iprot.readListBegin();
@@ -278,37 +278,37 @@
 if (this.indexName != null) {
 field.name = "indexName";
 field.type = TType.STRING;
-field.id = -1;
+field.id = 1;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.indexName);
 oprot.writeFieldEnd();
 }
 field.name = "indexType";
 field.type = TType.I32;
-field.id = -2;
+field.id = 2;
 oprot.writeFieldBegin(field);
 oprot.writeI32(this.indexType);
 oprot.writeFieldEnd();
 if (this.tableName != null) {
 field.name = "tableName";
 field.type = TType.STRING;
-field.id = -3;
+field.id = 3;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.tableName);
 oprot.writeFieldEnd();
 }
-if (this.databaseName != null) {
-field.name = "databaseName";
+if (this.dbName != null) {
+field.name = "dbName";
 field.type = TType.STRING;
-field.id = -4;
+field.id = 4;
 oprot.writeFieldBegin(field);
-oprot.writeString(this.databaseName);
+oprot.writeString(this.dbName);
 oprot.writeFieldEnd();
 }
 if (this.colNames != null) {
 field.name = "colNames";
 field.type = TType.LIST;
-field.id = -5;
+field.id = 5;
 oprot.writeFieldBegin(field);
 {
   oprot.writeListBegin(new TList(TType.STRING, this.colNames.size()));
@@ -331,8 +331,8 @@
 sb.append(this.indexType);
 sb.append(",tableName:");
 sb.append(this.tableName);
-sb.append(",databaseName:");
-sb.append(this.databaseName);
+sb.append(",dbName:");
+sb.append(this.dbName);
 sb.append(",colNames:");
 sb.append(this.colNames);
 sb.append(")");

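Index picks up the same id renumbering plus a rename: databaseName becomes dbName, matching the accessor that Partition (below) and Table expose, so call sites address every struct's database the same way. The migration at a call site is mechanical; setIndexType and setColNames are assumed from the bean pattern, since only their read/write cases appear above:

    import java.util.Arrays;
    import org.apache.hadoop.hive.metastore.api.Index;

    public class IndexExample {
      public static void main(String[] args) {
        Index idx = new Index();
        idx.setIndexName("pv_ds_idx");
        idx.setIndexType(0);                        // plain i32, per the TType.I32 case
        idx.setTableName("page_view");
        idx.setDbName("default");                   // was setDatabaseName("default")
        idx.setColNames(Arrays.asList("ds", "hr"));
        System.out.println(idx);
      }
    }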
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java Fri Sep 19 16:56:30 2008
@@ -17,116 +17,116 @@
 import com.facebook.thrift.transport.*;
 
 public class MetaException extends Exception implements TBase, java.io.Serializable {
-  private String message;
+private String message;
 
-  public final Isset __isset = new Isset();
-  public static final class Isset implements java.io.Serializable {
-    public boolean message = false;
-  }
-
-  public MetaException() {
-  }
-
-  public MetaException(
-    String message)
-  {
-    this();
-    this.message = message;
-    this.__isset.message = true;
-  }
-
-  public String getMessage() {
-    return this.message;
-  }
-
-  public void setMessage(String message) {
-    this.message = message;
-    this.__isset.message = true;
-  }
-
-  public void unsetMessage() {
-    this.__isset.message = false;
-  }
-
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof MetaException)
-      return this.equals((MetaException)that);
-    return false;
-  }
-
-  public boolean equals(MetaException that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_message = true && (this.message != null);
-    boolean that_present_message = true && (that.message != null);
-    if (this_present_message || that_present_message) {
-      if (!(this_present_message && that_present_message))
-        return false;
-      if (!this.message.equals(that.message))
-        return false;
-    }
+public final Isset __isset = new Isset();
+public static final class Isset implements java.io.Serializable {
+public boolean message = false;
+}
 
-    return true;
-  }
+public MetaException() {
+}
 
-  public int hashCode() {
-    return 0;
-  }
-
-  public void read(TProtocol iprot) throws TException {
-    TField field;
-    iprot.readStructBegin();
-    while (true)
-    {
-      field = iprot.readFieldBegin();
-      if (field.type == TType.STOP) { 
-        break;
-      }
-      switch (field.id)
-      {
-        case -1:
-          if (field.type == TType.STRING) {
-            this.message = iprot.readString();
-            this.__isset.message = true;
-          } else { 
-            TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        default:
-          TProtocolUtil.skip(iprot, field.type);
-          break;
-      }
-      iprot.readFieldEnd();
-    }
-    iprot.readStructEnd();
-  }
+public MetaException(
+String message)
+{
+this();
+this.message = message;
+this.__isset.message = true;
+}
+
+public String getMessage() {
+return this.message;
+}
+
+public void setMessage(String message) {
+this.message = message;
+this.__isset.message = true;
+}
+
+public void unsetMessage() {
+this.__isset.message = false;
+}
+
+public boolean equals(Object that) {
+if (that == null)
+  return false;
+if (that instanceof MetaException)
+  return this.equals((MetaException)that);
+return false;
+}
+
+public boolean equals(MetaException that) {
+if (that == null)
+  return false;
+
+boolean this_present_message = true && (this.message != null);
+boolean that_present_message = true && (that.message != null);
+if (this_present_message || that_present_message) {
+if (!(this_present_message && that_present_message))
+  return false;
+if (!this.message.equals(that.message))
+  return false;
+}
+
+return true;
+}
 
-  public void write(TProtocol oprot) throws TException {
-    TStruct struct = new TStruct("MetaException");
-    oprot.writeStructBegin(struct);
-    TField field = new TField();
-    if (this.message != null) {
-      field.name = "message";
-      field.type = TType.STRING;
-      field.id = -1;
-      oprot.writeFieldBegin(field);
-      oprot.writeString(this.message);
-      oprot.writeFieldEnd();
+public int hashCode() {
+return 0;
+}
+
+public void read(TProtocol iprot) throws TException {
+TField field;
+iprot.readStructBegin();
+while (true)
+{
+field = iprot.readFieldBegin();
+if (field.type == TType.STOP) { 
+  break;
+}
+switch (field.id)
+{
+  case -1:
+    if (field.type == TType.STRING) {
+      this.message = iprot.readString();
+      this.__isset.message = true;
+    } else { 
+      TProtocolUtil.skip(iprot, field.type);
     }
-    oprot.writeFieldStop();
-    oprot.writeStructEnd();
-  }
-
-  public String toString() {
-    StringBuilder sb = new StringBuilder("MetaException(");
-    sb.append("message:");
-    sb.append(this.message);
-    sb.append(")");
-    return sb.toString();
-  }
+    break;
+  default:
+    TProtocolUtil.skip(iprot, field.type);
+    break;
+}
+iprot.readFieldEnd();
+}
+iprot.readStructEnd();
+}
+
+public void write(TProtocol oprot) throws TException {
+TStruct struct = new TStruct("MetaException");
+oprot.writeStructBegin(struct);
+TField field = new TField();
+if (this.message != null) {
+field.name = "message";
+field.type = TType.STRING;
+field.id = -1;
+oprot.writeFieldBegin(field);
+oprot.writeString(this.message);
+oprot.writeFieldEnd();
+}
+oprot.writeFieldStop();
+oprot.writeStructEnd();
+}
+
+public String toString() {
+StringBuilder sb = new StringBuilder("MetaException(");
+sb.append("message:");
+sb.append(this.message);
+sb.append(")");
+return sb.toString();
+}
 
 }
 

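The MetaException hunk is pure re-indentation: the generator now emits the body flush left, and, notably, the message field keeps its implicit id of -1, so this struct's wire format is untouched by the patch. Functionally it remains a dual-purpose class, a Thrift struct that is also a throwable whose getMessage() shadows Throwable's:

    import org.apache.hadoop.hive.metastore.api.MetaException;

    public class MetaExceptionExample {
      public static void main(String[] args) {
        try {
          throw new MetaException("table page_view not found");
        } catch (MetaException e) {
          System.err.println(e.getMessage());  // returns the struct's message field
        }
      }
    }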
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java Fri Sep 19 16:56:30 2008
@@ -114,7 +114,7 @@
 }
 switch (field.id)
 {
-  case -1:
+  case 1:
     if (field.type == TType.STRING) {
       this.col = iprot.readString();
       this.__isset.col = true;
@@ -122,7 +122,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -2:
+  case 2:
     if (field.type == TType.I32) {
       this.order = iprot.readI32();
       this.__isset.order = true;
@@ -146,14 +146,14 @@
 if (this.col != null) {
 field.name = "col";
 field.type = TType.STRING;
-field.id = -1;
+field.id = 1;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.col);
 oprot.writeFieldEnd();
 }
 field.name = "order";
 field.type = TType.I32;
-field.id = -2;
+field.id = 2;
 oprot.writeFieldBegin(field);
 oprot.writeI32(this.order);
 oprot.writeFieldEnd();

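Order is another ids-only change (-1, -2 become 1, 2). The struct pairs a column name with an i32 sort direction; by Hive's convention 1 means ascending and 0 descending, an assumption here since the diff does not spell it out, and the setters are likewise assumed from the bean pattern:

    import org.apache.hadoop.hive.metastore.api.Order;

    public class OrderExample {
      public static void main(String[] args) {
        Order o = new Order();
        o.setCol("viewtime");
        o.setOrder(1);  // 1 = ascending by Hive convention (assumption)
        System.out.println(o);
      }
    }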
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java Fri Sep 19 16:56:30 2008
@@ -18,7 +18,7 @@
 
 public class Partition implements TBase, java.io.Serializable {
 private List<String> values;
-private String database;
+private String dbName;
 private String tableName;
 private int createTime;
 private int lastAccessTime;
@@ -28,7 +28,7 @@
 public final Isset __isset = new Isset();
 public static final class Isset implements java.io.Serializable {
 public boolean values = false;
-public boolean database = false;
+public boolean dbName = false;
 public boolean tableName = false;
 public boolean createTime = false;
 public boolean lastAccessTime = false;
@@ -41,7 +41,7 @@
 
 public Partition(
 List<String> values,
-String database,
+String dbName,
 String tableName,
 int createTime,
 int lastAccessTime,
@@ -51,8 +51,8 @@
 this();
 this.values = values;
 this.__isset.values = true;
-this.database = database;
-this.__isset.database = true;
+this.dbName = dbName;
+this.__isset.dbName = true;
 this.tableName = tableName;
 this.__isset.tableName = true;
 this.createTime = createTime;
@@ -95,17 +95,17 @@
 this.__isset.values = false;
 }
 
-public String getDatabase() {
-return this.database;
+public String getDbName() {
+return this.dbName;
 }
 
-public void setDatabase(String database) {
-this.database = database;
-this.__isset.database = true;
+public void setDbName(String dbName) {
+this.dbName = dbName;
+this.__isset.dbName = true;
 }
 
-public void unsetDatabase() {
-this.__isset.database = false;
+public void unsetDbName() {
+this.__isset.dbName = false;
 }
 
 public String getTableName() {
@@ -208,12 +208,12 @@
   return false;
 }
 
-boolean this_present_database = true && (this.database != null);
-boolean that_present_database = true && (that.database != null);
-if (this_present_database || that_present_database) {
-if (!(this_present_database && that_present_database))
+boolean this_present_dbName = true && (this.dbName != null);
+boolean that_present_dbName = true && (that.dbName != null);
+if (this_present_dbName || that_present_dbName) {
+if (!(this_present_dbName && that_present_dbName))
   return false;
-if (!this.database.equals(that.database))
+if (!this.dbName.equals(that.dbName))
   return false;
 }
 
@@ -280,7 +280,7 @@
 }
 switch (field.id)
 {
-  case -1:
+  case 1:
     if (field.type == TType.LIST) {
       {
         TList _list35 = iprot.readListBegin();
@@ -298,15 +298,15 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -2:
+  case 2:
     if (field.type == TType.STRING) {
-      this.database = iprot.readString();
-      this.__isset.database = true;
+      this.dbName = iprot.readString();
+      this.__isset.dbName = true;
     } else { 
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -3:
+  case 3:
     if (field.type == TType.STRING) {
       this.tableName = iprot.readString();
       this.__isset.tableName = true;
@@ -314,7 +314,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -4:
+  case 4:
     if (field.type == TType.I32) {
       this.createTime = iprot.readI32();
       this.__isset.createTime = true;
@@ -322,7 +322,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -5:
+  case 5:
     if (field.type == TType.I32) {
       this.lastAccessTime = iprot.readI32();
       this.__isset.lastAccessTime = true;
@@ -330,7 +330,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -6:
+  case 6:
     if (field.type == TType.STRUCT) {
       this.sd = new StorageDescriptor();
       this.sd.read(iprot);
@@ -339,7 +339,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -7:
+  case 7:
     if (field.type == TType.MAP) {
       {
         TMap _map38 = iprot.readMapBegin();
@@ -375,7 +375,7 @@
 if (this.values != null) {
 field.name = "values";
 field.type = TType.LIST;
-field.id = -1;
+field.id = 1;
 oprot.writeFieldBegin(field);
 {
   oprot.writeListBegin(new TList(TType.STRING, this.values.size()));
@@ -386,38 +386,38 @@
 }
 oprot.writeFieldEnd();
 }
-if (this.database != null) {
-field.name = "database";
+if (this.dbName != null) {
+field.name = "dbName";
 field.type = TType.STRING;
-field.id = -2;
+field.id = 2;
 oprot.writeFieldBegin(field);
-oprot.writeString(this.database);
+oprot.writeString(this.dbName);
 oprot.writeFieldEnd();
 }
 if (this.tableName != null) {
 field.name = "tableName";
 field.type = TType.STRING;
-field.id = -3;
+field.id = 3;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.tableName);
 oprot.writeFieldEnd();
 }
 field.name = "createTime";
 field.type = TType.I32;
-field.id = -4;
+field.id = 4;
 oprot.writeFieldBegin(field);
 oprot.writeI32(this.createTime);
 oprot.writeFieldEnd();
 field.name = "lastAccessTime";
 field.type = TType.I32;
-field.id = -5;
+field.id = 5;
 oprot.writeFieldBegin(field);
 oprot.writeI32(this.lastAccessTime);
 oprot.writeFieldEnd();
 if (this.sd != null) {
 field.name = "sd";
 field.type = TType.STRUCT;
-field.id = -6;
+field.id = 6;
 oprot.writeFieldBegin(field);
 this.sd.write(oprot);
 oprot.writeFieldEnd();
@@ -425,7 +425,7 @@
 if (this.parameters != null) {
 field.name = "parameters";
 field.type = TType.MAP;
-field.id = -7;
+field.id = 7;
 oprot.writeFieldBegin(field);
 {
   oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.parameters.size()));
@@ -445,8 +445,8 @@
 StringBuilder sb = new StringBuilder("Partition(");
 sb.append("values:");
 sb.append(this.values);
-sb.append(",database:");
-sb.append(this.database);
+sb.append(",dbName:");
+sb.append(this.dbName);
 sb.append(",tableName:");
 sb.append(this.tableName);
 sb.append(",createTime:");

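Partition follows Index: database is renamed dbName and the seven fields take ids 1..7. Read together with the cases above, the full constructor is (List<String> values, String dbName, String tableName, int createTime, int lastAccessTime, StorageDescriptor sd, Map<String,String> parameters); the last two argument types are inferred from the TType.STRUCT and TType.MAP cases, and the no-arg StorageDescriptor constructor is assumed from the bean pattern. A construction sketch:

    import java.util.Arrays;
    import java.util.HashMap;
    import org.apache.hadoop.hive.metastore.api.Partition;
    import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

    public class PartitionExample {
      public static void main(String[] args) {
        Partition p = new Partition(
            Arrays.asList("2008-09-19"),                // values, one per partition key
            "default",                                   // dbName (was: database)
            "page_view",                                 // tableName
            (int) (System.currentTimeMillis() / 1000L),  // createTime, epoch seconds
            0,                                           // lastAccessTime, 0 = never
            new StorageDescriptor(),                     // sd
            new HashMap<String, String>());              // parameters
        System.out.println(p.getDbName());               // was getDatabase()
      }
    }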
Modified: hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java (original)
+++ hadoop/core/trunk/src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java Fri Sep 19 16:56:30 2008
@@ -18,25 +18,13 @@
 
 public class SerDeInfo implements TBase, java.io.Serializable {
 private String name;
-private String serializationFormat;
-private String serializationClass;
 private String serializationLib;
-private String fieldDelim;
-private String collectionItemDelim;
-private String mapKeyDelim;
-private String lineDelim;
 private Map<String,String> parameters;
 
 public final Isset __isset = new Isset();
 public static final class Isset implements java.io.Serializable {
 public boolean name = false;
-public boolean serializationFormat = false;
-public boolean serializationClass = false;
 public boolean serializationLib = false;
-public boolean fieldDelim = false;
-public boolean collectionItemDelim = false;
-public boolean mapKeyDelim = false;
-public boolean lineDelim = false;
 public boolean parameters = false;
 }
 
@@ -45,32 +33,14 @@
 
 public SerDeInfo(
 String name,
-String serializationFormat,
-String serializationClass,
 String serializationLib,
-String fieldDelim,
-String collectionItemDelim,
-String mapKeyDelim,
-String lineDelim,
 Map<String,String> parameters)
 {
 this();
 this.name = name;
 this.__isset.name = true;
-this.serializationFormat = serializationFormat;
-this.__isset.serializationFormat = true;
-this.serializationClass = serializationClass;
-this.__isset.serializationClass = true;
 this.serializationLib = serializationLib;
 this.__isset.serializationLib = true;
-this.fieldDelim = fieldDelim;
-this.__isset.fieldDelim = true;
-this.collectionItemDelim = collectionItemDelim;
-this.__isset.collectionItemDelim = true;
-this.mapKeyDelim = mapKeyDelim;
-this.__isset.mapKeyDelim = true;
-this.lineDelim = lineDelim;
-this.__isset.lineDelim = true;
 this.parameters = parameters;
 this.__isset.parameters = true;
 }
@@ -88,32 +58,6 @@
 this.__isset.name = false;
 }
 
-public String getSerializationFormat() {
-return this.serializationFormat;
-}
-
-public void setSerializationFormat(String serializationFormat) {
-this.serializationFormat = serializationFormat;
-this.__isset.serializationFormat = true;
-}
-
-public void unsetSerializationFormat() {
-this.__isset.serializationFormat = false;
-}
-
-public String getSerializationClass() {
-return this.serializationClass;
-}
-
-public void setSerializationClass(String serializationClass) {
-this.serializationClass = serializationClass;
-this.__isset.serializationClass = true;
-}
-
-public void unsetSerializationClass() {
-this.__isset.serializationClass = false;
-}
-
 public String getSerializationLib() {
 return this.serializationLib;
 }
@@ -127,58 +71,6 @@
 this.__isset.serializationLib = false;
 }
 
-public String getFieldDelim() {
-return this.fieldDelim;
-}
-
-public void setFieldDelim(String fieldDelim) {
-this.fieldDelim = fieldDelim;
-this.__isset.fieldDelim = true;
-}
-
-public void unsetFieldDelim() {
-this.__isset.fieldDelim = false;
-}
-
-public String getCollectionItemDelim() {
-return this.collectionItemDelim;
-}
-
-public void setCollectionItemDelim(String collectionItemDelim) {
-this.collectionItemDelim = collectionItemDelim;
-this.__isset.collectionItemDelim = true;
-}
-
-public void unsetCollectionItemDelim() {
-this.__isset.collectionItemDelim = false;
-}
-
-public String getMapKeyDelim() {
-return this.mapKeyDelim;
-}
-
-public void setMapKeyDelim(String mapKeyDelim) {
-this.mapKeyDelim = mapKeyDelim;
-this.__isset.mapKeyDelim = true;
-}
-
-public void unsetMapKeyDelim() {
-this.__isset.mapKeyDelim = false;
-}
-
-public String getLineDelim() {
-return this.lineDelim;
-}
-
-public void setLineDelim(String lineDelim) {
-this.lineDelim = lineDelim;
-this.__isset.lineDelim = true;
-}
-
-public void unsetLineDelim() {
-this.__isset.lineDelim = false;
-}
-
 public int getParametersSize() {
 return (this.parameters == null) ? 0 : this.parameters.size();
 }
@@ -226,24 +118,6 @@
   return false;
 }
 
-boolean this_present_serializationFormat = true && (this.serializationFormat != null);
-boolean that_present_serializationFormat = true && (that.serializationFormat != null);
-if (this_present_serializationFormat || that_present_serializationFormat) {
-if (!(this_present_serializationFormat && that_present_serializationFormat))
-  return false;
-if (!this.serializationFormat.equals(that.serializationFormat))
-  return false;
-}
-
-boolean this_present_serializationClass = true && (this.serializationClass != null);
-boolean that_present_serializationClass = true && (that.serializationClass != null);
-if (this_present_serializationClass || that_present_serializationClass) {
-if (!(this_present_serializationClass && that_present_serializationClass))
-  return false;
-if (!this.serializationClass.equals(that.serializationClass))
-  return false;
-}
-
 boolean this_present_serializationLib = true && (this.serializationLib != null);
 boolean that_present_serializationLib = true && (that.serializationLib != null);
 if (this_present_serializationLib || that_present_serializationLib) {
@@ -253,42 +127,6 @@
   return false;
 }
 
-boolean this_present_fieldDelim = true && (this.fieldDelim != null);
-boolean that_present_fieldDelim = true && (that.fieldDelim != null);
-if (this_present_fieldDelim || that_present_fieldDelim) {
-if (!(this_present_fieldDelim && that_present_fieldDelim))
-  return false;
-if (!this.fieldDelim.equals(that.fieldDelim))
-  return false;
-}
-
-boolean this_present_collectionItemDelim = true && (this.collectionItemDelim != null);
-boolean that_present_collectionItemDelim = true && (that.collectionItemDelim != null);
-if (this_present_collectionItemDelim || that_present_collectionItemDelim) {
-if (!(this_present_collectionItemDelim && that_present_collectionItemDelim))
-  return false;
-if (!this.collectionItemDelim.equals(that.collectionItemDelim))
-  return false;
-}
-
-boolean this_present_mapKeyDelim = true && (this.mapKeyDelim != null);
-boolean that_present_mapKeyDelim = true && (that.mapKeyDelim != null);
-if (this_present_mapKeyDelim || that_present_mapKeyDelim) {
-if (!(this_present_mapKeyDelim && that_present_mapKeyDelim))
-  return false;
-if (!this.mapKeyDelim.equals(that.mapKeyDelim))
-  return false;
-}
-
-boolean this_present_lineDelim = true && (this.lineDelim != null);
-boolean that_present_lineDelim = true && (that.lineDelim != null);
-if (this_present_lineDelim || that_present_lineDelim) {
-if (!(this_present_lineDelim && that_present_lineDelim))
-  return false;
-if (!this.lineDelim.equals(that.lineDelim))
-  return false;
-}
-
 boolean this_present_parameters = true && (this.parameters != null);
 boolean that_present_parameters = true && (that.parameters != null);
 if (this_present_parameters || that_present_parameters) {
@@ -316,7 +154,7 @@
 }
 switch (field.id)
 {
-  case -1:
+  case 1:
     if (field.type == TType.STRING) {
       this.name = iprot.readString();
       this.__isset.name = true;
@@ -324,23 +162,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -2:
-    if (field.type == TType.STRING) {
-      this.serializationFormat = iprot.readString();
-      this.__isset.serializationFormat = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -3:
-    if (field.type == TType.STRING) {
-      this.serializationClass = iprot.readString();
-      this.__isset.serializationClass = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -4:
+  case 2:
     if (field.type == TType.STRING) {
       this.serializationLib = iprot.readString();
       this.__isset.serializationLib = true;
@@ -348,39 +170,7 @@
       TProtocolUtil.skip(iprot, field.type);
     }
     break;
-  case -5:
-    if (field.type == TType.STRING) {
-      this.fieldDelim = iprot.readString();
-      this.__isset.fieldDelim = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -6:
-    if (field.type == TType.STRING) {
-      this.collectionItemDelim = iprot.readString();
-      this.__isset.collectionItemDelim = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -7:
-    if (field.type == TType.STRING) {
-      this.mapKeyDelim = iprot.readString();
-      this.__isset.mapKeyDelim = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -8:
-    if (field.type == TType.STRING) {
-      this.lineDelim = iprot.readString();
-      this.__isset.lineDelim = true;
-    } else { 
-      TProtocolUtil.skip(iprot, field.type);
-    }
-    break;
-  case -9:
+  case 3:
     if (field.type == TType.MAP) {
       {
         TMap _map4 = iprot.readMapBegin();
@@ -416,71 +206,23 @@
 if (this.name != null) {
 field.name = "name";
 field.type = TType.STRING;
-field.id = -1;
+field.id = 1;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.name);
 oprot.writeFieldEnd();
 }
-if (this.serializationFormat != null) {
-field.name = "serializationFormat";
-field.type = TType.STRING;
-field.id = -2;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.serializationFormat);
-oprot.writeFieldEnd();
-}
-if (this.serializationClass != null) {
-field.name = "serializationClass";
-field.type = TType.STRING;
-field.id = -3;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.serializationClass);
-oprot.writeFieldEnd();
-}
 if (this.serializationLib != null) {
 field.name = "serializationLib";
 field.type = TType.STRING;
-field.id = -4;
+field.id = 2;
 oprot.writeFieldBegin(field);
 oprot.writeString(this.serializationLib);
 oprot.writeFieldEnd();
 }
-if (this.fieldDelim != null) {
-field.name = "fieldDelim";
-field.type = TType.STRING;
-field.id = -5;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.fieldDelim);
-oprot.writeFieldEnd();
-}
-if (this.collectionItemDelim != null) {
-field.name = "collectionItemDelim";
-field.type = TType.STRING;
-field.id = -6;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.collectionItemDelim);
-oprot.writeFieldEnd();
-}
-if (this.mapKeyDelim != null) {
-field.name = "mapKeyDelim";
-field.type = TType.STRING;
-field.id = -7;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.mapKeyDelim);
-oprot.writeFieldEnd();
-}
-if (this.lineDelim != null) {
-field.name = "lineDelim";
-field.type = TType.STRING;
-field.id = -8;
-oprot.writeFieldBegin(field);
-oprot.writeString(this.lineDelim);
-oprot.writeFieldEnd();
-}
 if (this.parameters != null) {
 field.name = "parameters";
 field.type = TType.MAP;
-field.id = -9;
+field.id = 3;
 oprot.writeFieldBegin(field);
 {
   oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.parameters.size()));
@@ -500,20 +242,8 @@
 StringBuilder sb = new StringBuilder("SerDeInfo(");
 sb.append("name:");
 sb.append(this.name);
-sb.append(",serializationFormat:");
-sb.append(this.serializationFormat);
-sb.append(",serializationClass:");
-sb.append(this.serializationClass);
 sb.append(",serializationLib:");
 sb.append(this.serializationLib);
-sb.append(",fieldDelim:");
-sb.append(this.fieldDelim);
-sb.append(",collectionItemDelim:");
-sb.append(this.collectionItemDelim);
-sb.append(",mapKeyDelim:");
-sb.append(this.mapKeyDelim);
-sb.append(",lineDelim:");
-sb.append(this.lineDelim);
 sb.append(",parameters:");
 sb.append(this.parameters);
 sb.append(")");
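
SerDeInfo carries the substance of the SerDe refactoring: the six special-cased members (serializationFormat, serializationClass, fieldDelim, collectionItemDelim, mapKeyDelim, lineDelim) disappear, leaving name, serializationLib, and the open-ended parameters map with ids 1..3. Per-SerDe settings now travel as map entries rather than struct fields, so adding a new SerDe no longer requires a metastore schema change. A migration sketch; the key strings and the SerDe class name are conventional values, not shown in this diff, and the setters are assumed from the bean pattern:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.SerDeInfo;

    public class SerDeInfoExample {
      public static void main(String[] args) {
        Map<String, String> params = new HashMap<String, String>();
        params.put("field.delim", "\t");          // was setFieldDelim("\t")
        params.put("serialization.format", "1");  // was setSerializationFormat("1")

        SerDeInfo info = new SerDeInfo();
        info.setName("page_view_serde");
        info.setSerializationLib(
            "org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe");
        info.setParameters(params);
        System.out.println(info);
      }
    }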