Posted to commits@hive.apache.org by ha...@apache.org on 2014/04/14 17:56:02 UTC

svn commit: r1587230 [1/2] - in /hive/trunk: hcatalog/ hcatalog/bin/ hcatalog/build-support/ant/ hcatalog/conf/ hcatalog/core/src/main/java/org/apache/hcatalog/cli/ hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/ hcatalog/core/src...

Author: hashutosh
Date: Mon Apr 14 15:55:57 2014
New Revision: 1587230

URL: http://svn.apache.org/r1587230
Log:
HIVE-6432 : Remove deprecated methods in HCatalog (Sushanth Sowmyan via Ashutosh Chauhan)
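
For callers migrating off the removed org.apache.hcatalog.* classes: the replacements live in the org.apache.hive.hcatalog.* packages, and the deprecated JobContext/Job-based accessors (also removed below) give way to Configuration-based overloads. A minimal output-side sketch, with hypothetical database and table names:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

    public class OutputMigrationSketch {
      static HCatSchema tableSchema(Configuration conf) throws Exception {
        Job job = new Job(conf);
        // null partition values: write to an unpartitioned (hypothetical) table
        HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", "my_table", null));
        // Old (removed): HCatOutputFormat.getTableSchema(job)
        return HCatOutputFormat.getTableSchema(job.getConfiguration());
      }
    }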

Added:
    hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
    hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
Removed:
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatCli.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/ErrorType.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatConstants.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatException.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/DataType.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/DefaultHCatRecord.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecord.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordable.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/LazyHCatRecord.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/Pair.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/ReaderWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchema.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/DataTransferFactory.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/EntityBase.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/HCatReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/HCatWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/ReadEntity.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/ReaderContext.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/WriteEntity.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/WriterContext.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/state/DefaultStateProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/state/StateProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/DefaultOutputFormatContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/DefaultRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FosterStorageHandler.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java.broken
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java.broken
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java.broken
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatSplit.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InitializeInput.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputFormatContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/PartInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/ProgressReporter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/RecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/StorerInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/oozie/JavaAction.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/rcfile/RCFileMapReduceInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/rcfile/RCFileMapReduceOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/rcfile/RCFileMapReduceRecordReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/storagehandler/DummyHCatAuthProvider.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/ExitException.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/HcatTestUtils.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/MiniCluster.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/NoExitSecurityManager.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/DummyStorageHandler.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestStorageHandlerProperties.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestUseDatabase.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/HCatDataCheckUtil.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestDefaultHCatRecord.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestLazyHCatRecord.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestReaderWriter.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/schema/TestHCatSchema.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/fileformats/TestOrcDynamicPartitioned.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapred/TestHiveHCatInputFormat.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatEximInputFormat.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatEximOutputFormat.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatInputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestInputJobInfo.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileInputStorageDriver.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileOutputStorageDriver.java.broken
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatBaseLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatBaseStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatEximLoader.java.broken
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatEximStorer.java.broken
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/HCatStorerWrapper.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/MockLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/MyPigStorage.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatEximLoader.java.broken
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatEximStorer.java.broken
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerWrapper.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestOrcHCatLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestOrcHCatStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPermsInheritance.java.broken
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigHCatUtil.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigStorageDriver.java.broken
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/listener/NotificationListener.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/AddPartitionMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/CreateDatabaseMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/CreateTableMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/DropDatabaseMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/DropPartitionMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/DropTableMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/HCatEventMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/MessageDeserializer.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/MessageFactory.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/jms/MessagingUtils.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONAddPartitionMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONCreateDatabaseMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONCreateTableMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONDropDatabaseMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONDropPartitionMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONDropTableMessage.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONMessageDeserializer.java
    hive/trunk/hcatalog/server-extensions/src/main/java/org/apache/hcatalog/messaging/json/JSONMessageFactory.java
    hive/trunk/hcatalog/server-extensions/src/test/java/org/apache/hcatalog/listener/TestMsgBusConnection.java
    hive/trunk/hcatalog/server-extensions/src/test/java/org/apache/hcatalog/listener/TestNotificationListener.java
    hive/trunk/hcatalog/src/java/org/apache/hcatalog/package-info.java
    hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
    hive/trunk/hcatalog/storage-handlers/hbase/conf/revision-manager-site.xml
    hive/trunk/hcatalog/storage-handlers/hbase/if/transaction.thrift
    hive/trunk/hcatalog/storage-handlers/hbase/pom.xml
    hive/trunk/hcatalog/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointProtos.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevisionList.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HCatTableSnapshot.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ImportSequenceFile.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ResultConverter.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/FamilyRevision.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/IDGenerator.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/PathUtil.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RMConstants.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RPCConverter.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerConfiguration.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/TableSnapshot.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/Transaction.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKBasedRevisionManager.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKUtil.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/LockListener.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ProtocolSupport.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/WriteLock.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZNodeName.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZooKeeperOperation.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/package-info.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/protobuf/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.proto
    hive/trunk/hcatalog/storage-handlers/hbase/src/resources/revision-manager-default.xml
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/log4j.xml
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/IDGenClient.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerConfiguration.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestThriftSerialization.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/lock/TestWriteLock.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/lock/TestZNodeName.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseStorageHandler.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
    hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ObjectNotFoundException.java
    hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
    hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
    hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
    hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
Modified:
    hive/trunk/hcatalog/bin/hcat
    hive/trunk/hcatalog/bin/hcat.py
    hive/trunk/hcatalog/bin/templeton.cmd
    hive/trunk/hcatalog/build-support/ant/checkstyle.xml
    hive/trunk/hcatalog/conf/proto-hive-site.xml
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java
    hive/trunk/hcatalog/pom.xml
    hive/trunk/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml
    hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf
    hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf
    hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
    hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
    hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
    hive/trunk/itests/hcatalog-unit/pom.xml
    hive/trunk/packaging/pom.xml
    hive/trunk/packaging/src/main/assembly/bin.xml

Modified: hive/trunk/hcatalog/bin/hcat
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/bin/hcat?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/bin/hcat (original)
+++ hive/trunk/hcatalog/bin/hcat Mon Apr 14 15:55:57 2014
@@ -119,11 +119,6 @@ if [ "$(ls -1 $HCAT_PREFIX/share/hcatalo
 fi
 HCAT_JAR=`ls $HCAT_PREFIX/share/hcatalog/hive-hcatalog-core-[0-9]*.jar`
 
-# Find the storage-handler jars.
-for jar in ${HCAT_PREFIX}/share/hcatalog/storage-handlers/*/lib/*.jar ; do
-	HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
-done
-
 # Add all of the other jars to our classpath
 for jar in ${HIVE_LIB_DIR}/*.jar ; do
 	HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar

Modified: hive/trunk/hcatalog/bin/hcat.py
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/bin/hcat.py?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/bin/hcat.py (original)
+++ hive/trunk/hcatalog/bin/hcat.py Mon Apr 14 15:55:57 2014
@@ -82,11 +82,6 @@ if 'HADOOP_CLASSPATH' not in os.environ:
 os.environ['HADOOP_CLASSPATH'] += os.pathsep + hcatJars[0]
 # done adding the hcatalog jar to the hadoop classpath
 
-# adding hbase storage-handler jars
-hbaseStorageJars =  glob.glob(os.path.join(hcatPrefix, 'share', 'hcatalog', 'storage-handlers', 'hbase', 'lib', 'hive-hcatalog-hbase-storage-handler-*.jar'))
-if len(hbaseStorageJars) == 1:
-  os.environ['HADOOP_CLASSPATH'] += os.pathsep + hbaseStorageJars[0]
-
 # add all the other jars
 hcatLibJarFiles = os.path.join(hcatPrefix, 'share', 'hcatalog', 'lib', '*')
 os.environ['HADOOP_CLASSPATH'] += os.pathsep + hcatLibJarFiles

Modified: hive/trunk/hcatalog/bin/templeton.cmd
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/bin/templeton.cmd?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/bin/templeton.cmd (original)
+++ hive/trunk/hcatalog/bin/templeton.cmd Mon Apr 14 15:55:57 2014
@@ -61,7 +61,7 @@ setlocal enabledelayedexpansion
     set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j.properties
   )
   set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configuration=%TEMPLETON_LOG4J% %HADOOP_OPTS%
-  set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hcatalog.templeton.Main
+  set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hive.hcatalog.templeton.Main
   
   if defined service_entry (
     call :makeServiceXml %arguments%

Modified: hive/trunk/hcatalog/build-support/ant/checkstyle.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support/ant/checkstyle.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/build-support/ant/checkstyle.xml (original)
+++ hive/trunk/hcatalog/build-support/ant/checkstyle.xml Mon Apr 14 15:55:57 2014
@@ -43,17 +43,10 @@
           <exclude name="src/packages/**"/> <!-- TODO: delete packages configs -->
           <exclude name="src/test/e2e/hcatalog/data/**"/>
           <exclude name="src/test/e2e/templeton/inpdir/nums.txt"/>
-          <exclude name="storage-handlers/hbase/src/gen-java/**"/>
-          <exclude name="storage-handlers/hbase/src/test/all-tests"/>
-          <exclude name="storage-handlers/hbase/src/test/excluded-tests"/>
-          <exclude name="storage-handlers/hbase/metastore_db/**"/>
-          <exclude name="storage-handlers/hbase/partitions*"/>
-          <exclude name="storage-handlers/hbase/.partitions*"/>
           <exclude name="hcatalog-pig-adapter/target/**"/>
           <exclude name="server-extensions/target/**"/>
           <exclude name="core/target/**"/>
           <exclude name="webhcat/java-client/target/**"/>
-          <exclude name="storage-handlers/hbase/target/**"/>
           <exclude name="webhcat/svr/target/**"/>
           <exclude name="KEYS"/>
           <exclude name="LICENSE.txt"/>

Modified: hive/trunk/hcatalog/conf/proto-hive-site.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/conf/proto-hive-site.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/conf/proto-hive-site.xml (original)
+++ hive/trunk/hcatalog/conf/proto-hive-site.xml Mon Apr 14 15:55:57 2014
@@ -108,7 +108,7 @@
 
 <property>
   <name>hive.security.authorization.manager</name>
-  <value>org.apache.hcatalog.security.StorageDelegationAuthorizationProvider</value>
+  <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
   <description>the hive client authorization manager class name.
   The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.
   HCatalog uses a model, where authorization checks are delegated to the storage layer (hdfs, hbase, ...).

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java Mon Apr 14 15:55:57 2014
@@ -134,7 +134,8 @@ similarly for mapKeyType/mapKeyTypeInfo 
   String fieldName = null;
   String comment = null;
   /**
-   * @deprecated use {@link #typeInfo}
+   * @deprecated as of 0.13, slated for removal with 0.15
+   * use {@link #typeInfo} instead
    */
   Type type = null;
   Category category = null;
@@ -146,7 +147,7 @@ similarly for mapKeyType/mapKeyTypeInfo 
   HCatSchema subSchema = null;
 
   // populated if column is Map type
-  @Deprecated
+  @Deprecated // @deprecated as of 0.13, slated for removal with 0.15
   Type mapKeyType = null;
 
   private String typeString = null;
@@ -167,7 +168,8 @@ similarly for mapKeyType/mapKeyTypeInfo 
   /**
    * Returns type of the field
    * @return type of the field
-   * @deprecated use {@link #getTypeInfo()}
+   * @deprecated as of 0.13, slated for removal with 0.15
+   * use {@link #getTypeInfo()} instead
    */
   public Type getType() {
     return type;
@@ -203,7 +205,8 @@ similarly for mapKeyType/mapKeyTypeInfo 
    * @param fieldName Name of the primitive field
    * @param type Type of the primitive field
    * @throws HCatException if call made on non-primitive types
-   * @deprecated as of Hive 0.13; use {@link #HCatFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, String)}
+   * @deprecated as of 0.13, slated for removal with 0.15
+   * use {@link #HCatFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, String)}
    */
   public HCatFieldSchema(String fieldName, Type type, String comment) throws HCatException {
     assertTypeInCategory(type, Category.PRIMITIVE, fieldName);
@@ -255,7 +258,8 @@ similarly for mapKeyType/mapKeyTypeInfo 
    * @param mapKeyType - key type of the Map
    * @param mapValueSchema - subschema of the value of the Map
    * @throws HCatException if call made on non-Map types
-   * @deprecated use {@link #createMapTypeFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, HCatSchema, String)}
+   * @deprecated as of 0.13, slated for removal with 0.15
+   * use {@link #createMapTypeFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, HCatSchema, String)}
    */
   public HCatFieldSchema(String fieldName, Type type, Type mapKeyType, HCatSchema mapValueSchema, String comment) throws HCatException {
     assertTypeInCategory(type, Category.MAP, fieldName);
@@ -291,7 +295,8 @@ similarly for mapKeyType/mapKeyTypeInfo 
     return subSchema;
   }
   /**
-   * @deprecated use {@link #getMapKeyTypeInfo()}
+   * @deprecated as of 0.13, slated for removal with 0.15
+   * use {@link #getMapKeyTypeInfo()} instead
    */
   public Type getMapKeyType() throws HCatException {
     assertTypeInCategory(this.type, Category.MAP, this.fieldName);
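
The replacement constructors named in these javadocs take a PrimitiveTypeInfo instead of the old Type enum; a minimal sketch of the preferred form (field name and comment are hypothetical):

    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hive.hcatalog.common.HCatException;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

    public class FieldSchemaSketch {
      static HCatFieldSchema gpaField() throws HCatException {
        // Preferred since 0.13: PrimitiveTypeInfo-based constructor
        return new HCatFieldSchema("gpa", TypeInfoFactory.doubleTypeInfo, "grade point average");
      }
    }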

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java Mon Apr 14 15:55:57 2014
@@ -64,7 +64,7 @@ public class HCatOutputFormatWriter exte
     try {
       job = new Job(conf);
       HCatOutputFormat.setOutput(job, jobInfo);
-      HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
+      HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
       HCatOutputFormat outFormat = new HCatOutputFormat();
       outFormat.checkOutputSpecs(job);
       outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java Mon Apr 14 15:55:57 2014
@@ -52,7 +52,7 @@ class DefaultRecordWriterContainer exten
   public DefaultRecordWriterContainer(TaskAttemptContext context,
                     org.apache.hadoop.mapred.RecordWriter<? super WritableComparable<?>, ? super Writable> baseRecordWriter) throws IOException, InterruptedException {
     super(context, baseRecordWriter);
-    jobInfo = HCatOutputFormat.getJobInfo(context);
+    jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
     storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
     HCatOutputFormat.configureOutputStorageHandler(context);
     serDe = ReflectionUtils.newInstance(storageHandler.getSerDeClass(), context.getConfiguration());

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java Mon Apr 14 15:55:57 2014
@@ -99,7 +99,7 @@ class FileOutputCommitterContainer exten
   public FileOutputCommitterContainer(JobContext context,
                     org.apache.hadoop.mapred.OutputCommitter baseCommitter) throws IOException {
     super(context, baseCommitter);
-    jobInfo = HCatOutputFormat.getJobInfo(context);
+    jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
     dynamicPartitioningUsed = jobInfo.isDynamicPartitioningUsed();
 
     this.partitionsDiscovered = !dynamicPartitioningUsed;
@@ -177,7 +177,7 @@ class FileOutputCommitterContainer exten
         }
       }
       Path src;
-      OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext);
+      OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext.getConfiguration());
       Path tblPath = new Path(jobInfo.getTableInfo().getTableLocation());
       if (dynamicPartitioningUsed) {
         if (!customDynamicLocationUsed) {
@@ -230,7 +230,7 @@ class FileOutputCommitterContainer exten
     }
     registerPartitions(jobContext);
     // create _SUCCESS FILE if so requested.
-    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext);
+    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext.getConfiguration());
     if (getOutputDirMarking(jobContext.getConfiguration())) {
       Path outputPath = new Path(jobInfo.getLocation());
       FileSystem fileSys = outputPath.getFileSystem(jobContext
@@ -666,7 +666,7 @@ class FileOutputCommitterContainer exten
   private void discoverPartitions(JobContext context) throws IOException {
     if (!partitionsDiscovered) {
       //      LOG.info("discover ptns called");
-      OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
+      OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
 
       harProcessor.setEnabled(jobInfo.getHarRequested());
 
@@ -739,7 +739,7 @@ class FileOutputCommitterContainer exten
     if (dynamicPartitioningUsed){
       discoverPartitions(context);
     }
-    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
+    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
     Configuration conf = context.getConfiguration();
     Table table = new Table(jobInfo.getTableInfo().getTable());
     Path tblPath = new Path(table.getTTable().getSd().getLocation());

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java Mon Apr 14 15:55:57 2014
@@ -91,7 +91,7 @@ class FileOutputFormatContainer extends 
       sd.getSerializedClass().getName());
 
     RecordWriter<WritableComparable<?>, HCatRecord> rw;
-    if (HCatBaseOutputFormat.getJobInfo(context).isDynamicPartitioningUsed()){
+    if (HCatBaseOutputFormat.getJobInfo(context.getConfiguration()).isDynamicPartitioningUsed()){
       // When Dynamic partitioning is used, the RecordWriter instance initialized here isn't used. Can use null.
       // (That's because records can't be written until the values of the dynamic partitions are deduced.
       // By that time, a new local instance of RecordWriter, with the correct output-path, will be constructed.)
@@ -113,7 +113,7 @@ class FileOutputFormatContainer extends 
 
   @Override
   public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
-    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
+    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
     HiveMetaStoreClient client = null;
     try {
       HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
@@ -143,7 +143,7 @@ class FileOutputFormatContainer extends 
     //this needs to be manually set, under normal circumstances MR Task does this
     setWorkOutputPath(context);
     return new FileOutputCommitterContainer(context,
-      HCatBaseOutputFormat.getJobInfo(context).isDynamicPartitioningUsed() ?
+      HCatBaseOutputFormat.getJobInfo(context.getConfiguration()).isDynamicPartitioningUsed() ?
         null :
         new JobConf(context.getConfiguration()).getOutputCommitter());
   }

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java Mon Apr 14 15:55:57 2014
@@ -83,7 +83,7 @@ class FileRecordWriterContainer extends 
                    TaskAttemptContext context) throws IOException, InterruptedException {
     super(context, baseWriter);
     this.context = context;
-    jobInfo = HCatOutputFormat.getJobInfo(context);
+    jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
 
     storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
     serDe = ReflectionUtils.newInstance(storageHandler.getSerDeClass(), context.getConfiguration());
@@ -181,7 +181,7 @@ class FileRecordWriterContainer extends 
 
         org.apache.hadoop.mapred.TaskAttemptContext currTaskContext = HCatMapRedUtil.createTaskAttemptContext(context);
         configureDynamicStorageHandler(currTaskContext, dynamicPartValues);
-        localJobInfo = HCatBaseOutputFormat.getJobInfo(currTaskContext);
+        localJobInfo = HCatBaseOutputFormat.getJobInfo(currTaskContext.getConfiguration());
 
         //setup serDe
         SerDe currSerDe = ReflectionUtils.newInstance(storageHandler.getSerDeClass(), currTaskContext.getJobConf());
@@ -233,7 +233,7 @@ class FileRecordWriterContainer extends 
         baseDynamicCommitters.put(dynKey, baseOutputCommitter);
         dynamicContexts.put(dynKey, currTaskContext);
         dynamicObjectInspectors.put(dynKey, InternalUtil.createStructObjectInspector(jobInfo.getOutputSchema()));
-        dynamicOutputJobInfo.put(dynKey, HCatOutputFormat.getJobInfo(dynamicContexts.get(dynKey)));
+        dynamicOutputJobInfo.put(dynKey, HCatOutputFormat.getJobInfo(dynamicContexts.get(dynKey).getConfiguration()));
       }
 
       localJobInfo = dynamicOutputJobInfo.get(dynKey);

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java Mon Apr 14 15:55:57 2014
@@ -226,17 +226,6 @@ public abstract class HCatBaseInputForma
   }
 
   /**
-   * @see org.apache.hive.hcatalog.mapreduce.HCatBaseInputFormat#getTableSchema(org.apache.hadoop.conf.Configuration)
-   * @deprecated Use {@link #getTableSchema(org.apache.hadoop.conf.Configuration)}
-   */
-  @Deprecated
-  public static HCatSchema getTableSchema(JobContext context)
-    throws IOException {
-    return getTableSchema(context.getConfiguration());
-  }
-
-
-  /**
    * Gets the HCatTable schema for the table specified in the HCatInputFormat.setInput call
    * on the specified job context. This information is available only after HCatInputFormat.setInput
    * has been called for a JobContext.
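
The input side follows the same pattern as the output side; a minimal sketch assuming the same hypothetical database and table names:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    public class InputMigrationSketch {
      static HCatSchema tableSchema(Configuration conf) throws Exception {
        Job job = new Job(conf);
        HCatInputFormat.setInput(job, "default", "my_table");
        // Old (removed): HCatInputFormat.getTableSchema(job)
        return HCatInputFormat.getTableSchema(job.getConfiguration());
      }
    }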

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java Mon Apr 14 15:55:57 2014
@@ -43,14 +43,6 @@ public abstract class HCatBaseOutputForm
 //  static final private Log LOG = LogFactory.getLog(HCatBaseOutputFormat.class);
 
   /**
-   * @see org.apache.hive.hcatalog.mapreduce.HCatBaseOutputFormat#getTableSchema(org.apache.hadoop.conf.Configuration)
-   * @deprecated Use {@link #getTableSchema(org.apache.hadoop.conf.Configuration)}
-   */
-  public static HCatSchema getTableSchema(JobContext context) throws IOException {
-    return getTableSchema(context.getConfiguration());
-  }
-
-  /**
    * Gets the table schema for the table specified in the HCatOutputFormat.setOutput call
    * on the specified job context.
    * @param conf the Configuration object
@@ -81,7 +73,7 @@ public abstract class HCatBaseOutputForm
    */
   protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) 
     throws IOException {
-    OutputJobInfo jobInfo = getJobInfo(context);
+    OutputJobInfo jobInfo = getJobInfo(context.getConfiguration());
     HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), 
         jobInfo.getTableInfo().getStorerInfo());
     // Always configure storage handler with jobproperties/jobconf before calling any methods on it
@@ -97,14 +89,6 @@ public abstract class HCatBaseOutputForm
   }
 
   /**
-   * @see org.apache.hive.hcatalog.mapreduce.HCatBaseOutputFormat#getJobInfo(org.apache.hadoop.conf.Configuration)
-   * @deprecated use {@link #getJobInfo(org.apache.hadoop.conf.Configuration)}
-   */
-  public static OutputJobInfo getJobInfo(JobContext jobContext) throws IOException {
-    return getJobInfo(jobContext.getConfiguration());
-  }
-
-  /**
    * Gets the HCatOuputJobInfo object by reading the Configuration and deserializing
    * the string. If InputJobInfo is not present in the configuration, throws an
    * exception since that means HCatOutputFormat.setOutput has not been called.

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java Mon Apr 14 15:55:57 2014
@@ -101,7 +101,7 @@ public class HCatInputFormat extends HCa
   }
 
   /**
-   * @deprecated As of 0.13
+   * @deprecated as of 0.13, slated for removal with 0.15
    * Use {@link #setInput(org.apache.hadoop.conf.Configuration, String, String, String)} instead,
    * to specify a partition filter to directly initialize the input with.
    */

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java Mon Apr 14 15:55:57 2014
@@ -79,21 +79,6 @@ public class InputJobInfo implements Ser
     return new InputJobInfo(databaseName, tableName, filter, properties);
   }
 
-  /**
-   * Initializes a new InputJobInfo
-   * for reading data from a table.
-   * @param databaseName the db name
-   * @param tableName the table name
-   * @param filter the partition filter
-   */
-  @Deprecated
-  public static InputJobInfo create(String databaseName,
-                    String tableName,
-                    String filter) {
-    return create(databaseName, tableName, filter, null);
-  }
-
-
   private InputJobInfo(String databaseName,
              String tableName,
              String filter,
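
Since the removed three-argument create() simply delegated to the four-argument form with null properties, callers can substitute directly; a sketch with hypothetical names and partition filter:

    import org.apache.hive.hcatalog.mapreduce.InputJobInfo;

    public class CreateSketch {
      static InputJobInfo info() {
        // Equivalent to the removed InputJobInfo.create("mydb", "mytable", "ds='20140414'")
        return InputJobInfo.create("mydb", "mytable", "ds='20140414'", null);
      }
    }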

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java Mon Apr 14 15:55:57 2014
@@ -401,7 +401,7 @@ public abstract class HCatMapReduceTest 
 
     HCatInputFormat.setInput(job, dbName, tableName);
 
-    return HCatInputFormat.getTableSchema(job);
+    return HCatInputFormat.getTableSchema(job.getConfiguration());
   }
 
 }

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java Mon Apr 14 15:55:57 2014
@@ -145,7 +145,7 @@ public class TestHCatOutputFormat extend
     OutputJobInfo info = OutputJobInfo.create(dbName, tblName, partitionValues);
 
     HCatOutputFormat.setOutput(job, info);
-    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(job);
+    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(job.getConfiguration());
 
     assertNotNull(jobInfo.getTableInfo());
     assertEquals(1, jobInfo.getPartitionValues().size());

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java Mon Apr 14 15:55:57 2014
@@ -37,12 +37,4 @@ public class TestInputJobInfo extends HC
     Assert.assertEquals("value", jobInfo.getProperties().getProperty("key"));
   }
 
-  @Test
-  public void test3ArgCreate() throws Exception {
-    InputJobInfo jobInfo = InputJobInfo.create("Db", "Table", "Filter");
-    Assert.assertEquals("Db", jobInfo.getDatabaseName());
-    Assert.assertEquals("Table", jobInfo.getTableName());
-    Assert.assertEquals("Filter", jobInfo.getFilter());
-    Assert.assertEquals(0, jobInfo.getProperties().size());
-  }
 }

Modified: hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java (original)
+++ hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java Mon Apr 14 15:55:57 2014
@@ -197,7 +197,7 @@ public class HCatStorer extends HCatBase
         throw new PigException(he.getMessage(),
           PigHCatUtil.PIG_EXCEPTION_CODE, he);
       }
-      HCatSchema hcatTblSchema = HCatOutputFormat.getTableSchema(job);
+      HCatSchema hcatTblSchema = HCatOutputFormat.getTableSchema(job.getConfiguration());
       try {
         doSchemaValidations(pigSchema, hcatTblSchema);
       } catch (HCatException he) {

Modified: hive/trunk/hcatalog/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/pom.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/pom.xml (original)
+++ hive/trunk/hcatalog/pom.xml Mon Apr 14 15:55:57 2014
@@ -43,7 +43,6 @@
     <module>server-extensions</module>
     <module>webhcat/java-client</module>
     <module>webhcat/svr</module>
-    <module>storage-handlers/hbase</module>
     <module>streaming</module>
   </modules>
 

Modified: hive/trunk/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml (original)
+++ hive/trunk/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml Mon Apr 14 15:55:57 2014
@@ -149,7 +149,7 @@ and writes out all the records attached 
 <!-- ==================================================================== -->
 <section>
     <title>Complete Example Program</title>
-<p>A complete java program for the reader and writer examples above can be found at: <a href="https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestReaderWriter.java">https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestReaderWriter.java</a>.</p>
+<p>A complete java program for the reader and writer examples above can be found at: <a href="https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java">https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java</a>.</p>
 
 </section>
 

Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf Mon Apr 14 15:55:57 2014
@@ -219,21 +219,5 @@ jar :FUNCPATH:/testudf.jar org.apache.hi
                                 }
                         ],
                 }, # end g
-                {
-                        'name' => 'Hadoop_HBase',
-                        'tests' => [
-                                {
-                                 'num' => 1
-                                ,'hcat_prep'=>q\drop table if exists hadoop_hbase_1;
-create table hadoop_hbase_1(key string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:gpa');\
-                                ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.HBaseReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:
-\,
-                                ,'sql' => q\select name, sum(gpa) from studenttab10k group by name;\
-                                ,'floatpostprocess' => 1
-                                ,'delimiter' => '	'
-                                },
-                        ],
-                }, # end g
          ]
 }

Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf Mon Apr 14 15:55:57 2014
@@ -314,51 +314,6 @@ store c into ':OUTPATH:';\
                         ],
                 },
                 {
-                        'name' => 'Pig_HBase',
-                        'tests' => [
-                                {
-                                 'num' => 1
-                                ,'hcat_prep'=>q\drop table if exists pig_hbase_1;
-create table pig_hbase_1(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');\
-                                ,'pig' => q\set hcat.hbase.output.bulkMode 'false'
-a = load ':INPATH:/studenttab10k' as (name:chararray, age:int, gpa:float);
-b = group a by name;
-c = foreach b generate group as name, AVG(a.age) as age, AVG(a.gpa) as gpa;
-d = foreach c generate name as key, (chararray)age, (chararray)gpa as gpa;
-store d into 'pig_hbase_1' using org.apache.hcatalog.pig.HCatStorer();
-exec
-e = load 'pig_hbase_1' using org.apache.hcatalog.pig.HCatLoader();
-store e into ':OUTPATH:';\,
-                                ,'result_table' => ['pig_hbase_1','?']
-                                ,'result_table_loader' => 'org.apache.hcatalog.pig.HCatLoader()'
-				,'sql'   => [ 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;', 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;' ]
-                                ,'floatpostprocess' => 1
-                                ,'delimiter' => '	'
-                                },
-                                {
-                                 # multiquery
-                                 'num' => 2
-                                ,'hcat_prep'=>q\drop table if exists pig_hbase_2_1;
-create table pig_hbase_2_1(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
-drop table if exists pig_hbase_2_2;
-create table pig_hbase_2_2(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
-\
-                                ,'pig' => q\set hcat.hbase.output.bulkMode 'false'
-a = load ':INPATH:/studenttab10k' as (name:chararray, age:int, gpa:float);
-b = group a by name;
-c = foreach b generate group as name, AVG(a.age) as age, AVG(a.gpa) as gpa;
-d = foreach c generate name as key, (chararray)age, (chararray)gpa as gpa;
-store d into 'pig_hbase_2_1' using org.apache.hcatalog.pig.HCatStorer();
-store d into 'pig_hbase_2_2' using org.apache.hcatalog.pig.HCatStorer();\,
-                                ,'result_table' => ['pig_hbase_2_1','pig_hbase_2_2']
-                                ,'result_table_loader' => 'org.apache.hcatalog.pig.HCatLoader()'
-				,'sql'   => [ 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;', 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;']
-                                ,'floatpostprocess' => 1
-                                ,'delimiter' => '	'
-                                }
-                        ],
-                }, # end g
-                {
                         'name' => 'Pig_HCat_Barrier',
                         'tests' => [
                                 {

Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java Mon Apr 14 15:55:57 2014
@@ -47,7 +47,7 @@ import org.apache.hive.hcatalog.mapreduc
  * other columns. This is to simulate a typical operation in a map reduce
  * program to test that hcat hands the right data to the map reduce program
  *
- * Usage: hadoop jar org.apache.hcatalog.utils.HBaseReadWrite -libjars
+ * Usage: hadoop jar org.apache.hive.hcatalog.utils.WriteTextPartitioned -libjars
  * &lt;hcat_jar&gt; * &lt;serveruri&gt; &lt;input_tablename&gt; &lt;output_tablename&gt; [filter]
  * If filter is given it will be provided as the partition to write to.
  */
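
Following the corrected usage string, a concrete invocation might look like the sketch below;
the jar name, metastore URI, table names and filter are illustrative, modeled on the e2e
conventions elsewhere in this commit:

    hadoop jar testudf.jar org.apache.hive.hcatalog.utils.WriteTextPartitioned \
      -libjars hive-hcatalog-core.jar thrift://localhost:9083 studenttab10k out_table ds=20140414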

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf Mon Apr 14 15:55:57 2014
@@ -151,20 +151,9 @@ $cfg = 
      'status_code' => 200,
     },
     {
-     # GET queue?user.name=UNAME_OTHER, only get jobid as an array
-     'num' => 8,
-     'depends_on' => 'JOBS_1,JOBS_2,JOBS_3',
-     'method' => 'GET',
-     'url' => ':TEMPLETON_URL:/templeton/v1/queue?user.name=:UNAME_OTHER:',
-     'user_name' => ':UNAME_OTHER:',
-     'format_header' => 'Content-Type: application/json',
-     'json_path' => {'$[-1:]' => 'job_.*'},
-     'status_code' => 200,
-    },
-    {
      # GET jobs?user.name=UNAME_OTHER&fields=*, get all the details of the oldest 2 jobs whose
      # id is greater than job_0
-     'num' => 9,
+     'num' => 8,
      'depends_on' => 'JOBS_1,JOBS_2,JOBS_3',
      'method' => 'GET',
      'url' => ':TEMPLETON_URL:/templeton/v1/jobs?user.name=:UNAME_OTHER:&fields=*&numrecords=2&jobid=job_0',

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Mon Apr 14 15:55:57 2014
@@ -815,49 +815,6 @@ public class Server {
 
   /**
    * Return the status of the jobid.
-   * @deprecated use GET jobs/{jobid} instead.
-   */
-  @Deprecated
-  @GET
-  @Path("queue/{jobid}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public QueueStatusBean showQueueId(@PathParam("jobid") String jobid)
-    throws NotAuthorizedException, BadParam, IOException, InterruptedException {
-    return showJobId(jobid);
-  }
-
-  /**
-   * Kill a job in the queue.
-   * @deprecated use DELETE jobs/{jobid} instead.
-   */
-  @Deprecated
-  @DELETE
-  @Path("queue/{jobid}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public QueueStatusBean deleteQueueId(@PathParam("jobid") String jobid)
-    throws NotAuthorizedException, BadParam, IOException, InterruptedException {
-    return deleteJobId(jobid);
-  }
-
-  /**
-   * Return all the known job ids for this user.
-   * @deprecated use GET jobs instead.
-   */
-  @Deprecated
-  @GET
-  @Path("queue")
-  @Produces({MediaType.APPLICATION_JSON})
-  public List<String> showQueueList(@QueryParam("showall") boolean showall)
-    throws NotAuthorizedException, BadParam, IOException, InterruptedException {
-
-    verifyUser();
-
-    ListDelegator d = new ListDelegator(appConf);
-    return d.run(getDoAsUser(), showall);
-  }
-
-  /**
-   * Return the status of the jobid.
    */
   @GET
   @Path("jobs/{jobid}")

Modified: hive/trunk/itests/hcatalog-unit/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hcatalog-unit/pom.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/itests/hcatalog-unit/pom.xml (original)
+++ hive/trunk/itests/hcatalog-unit/pom.xml Mon Apr 14 15:55:57 2014
@@ -55,14 +55,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
-      <artifactId>hive-hcatalog-hbase-storage-handler</artifactId>
+      <artifactId>hive-hcatalog-pig-adapter</artifactId>
       <version>${project.version}</version>
-      <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hive.hcatalog</groupId>
-      <artifactId>hive-hcatalog-pig-adapter</artifactId>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-hbase-handler</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>

Added: hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java?rev=1587230&view=auto
==============================================================================
--- hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java (added)
+++ hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java Mon Apr 14 15:55:57 2014
@@ -0,0 +1,370 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.hbase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.ServerSocket;
+
+/**
+ * A MiniCluster harness composed of a number of Hadoop mini-cluster implementations
+ * and the other daemons needed for testing (HBase, Hive MetaStore, ZooKeeper, MiniMRCluster).
+ */
+public class ManyMiniCluster {
+
+  //MR stuff
+  private boolean miniMRClusterEnabled;
+  private MiniMRCluster mrCluster;
+  private int numTaskTrackers;
+  private JobConf jobConf;
+
+  //HBase stuff
+  private boolean miniHBaseClusterEnabled;
+  private MiniHBaseCluster hbaseCluster;
+  private String hbaseRoot;
+  private Configuration hbaseConf;
+  private String hbaseDir;
+
+  //ZK Stuff
+  private boolean miniZookeeperClusterEnabled;
+  private MiniZooKeeperCluster zookeeperCluster;
+  private int zookeeperPort;
+  private String zookeeperDir;
+
+  //DFS Stuff
+  private MiniDFSCluster dfsCluster;
+
+  //Hive Stuff
+  private boolean miniHiveMetastoreEnabled;
+  private HiveConf hiveConf;
+  private HiveMetaStoreClient hiveMetaStoreClient;
+
+  private final File workDir;
+  private boolean started = false;
+
+
+  /**
+   * Create a cluster instance via a builder that exposes the configurable options.
+   * @param workDir working directory ManyMiniCluster will use for all of its *MiniCluster instances
+   * @return a Builder instance
+   */
+  public static Builder create(File workDir) {
+    return new Builder(workDir);
+  }
+
+  private ManyMiniCluster(Builder b) {
+    workDir = b.workDir;
+    numTaskTrackers = b.numTaskTrackers;
+    hiveConf = b.hiveConf;
+    jobConf = b.jobConf;
+    hbaseConf = b.hbaseConf;
+    miniMRClusterEnabled = b.miniMRClusterEnabled;
+    miniHBaseClusterEnabled = b.miniHBaseClusterEnabled;
+    miniHiveMetastoreEnabled = b.miniHiveMetastoreEnabled;
+    miniZookeeperClusterEnabled = b.miniZookeeperClusterEnabled;
+  }
+
+  protected synchronized void start() {
+    try {
+      if (!started) {
+        FileUtil.fullyDelete(workDir);
+        if (miniMRClusterEnabled) {
+          setupMRCluster();
+        }
+        if (miniZookeeperClusterEnabled || miniHBaseClusterEnabled) {
+          miniZookeeperClusterEnabled = true;
+          setupZookeeper();
+        }
+        if (miniHBaseClusterEnabled) {
+          setupHBaseCluster();
+        }
+        if (miniHiveMetastoreEnabled) {
+          setUpMetastore();
+        }
+      }
+    } catch (Exception e) {
+      throw new IllegalStateException("Failed to setup cluster", e);
+    }
+  }
+
+  protected synchronized void stop() {
+    if (hbaseCluster != null) {
+      HConnectionManager.deleteAllConnections(true);
+      try {
+        hbaseCluster.shutdown();
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+      hbaseCluster = null;
+    }
+    if (zookeeperCluster != null) {
+      try {
+        zookeeperCluster.shutdown();
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+      zookeeperCluster = null;
+    }
+    if (mrCluster != null) {
+      try {
+        mrCluster.shutdown();
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+      mrCluster = null;
+    }
+    if (dfsCluster != null) {
+      try {
+        dfsCluster.getFileSystem().close();
+        dfsCluster.shutdown();
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+      dfsCluster = null;
+    }
+    try {
+      FileSystem.closeAll();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    started = false;
+  }
+
+  /**
+   * @return Configuration of mini HBase cluster
+   */
+  public Configuration getHBaseConf() {
+    return HBaseConfiguration.create(hbaseConf);
+  }
+
+  /**
+   * @return Configuration of mini MR cluster
+   */
+  public Configuration getJobConf() {
+    return new Configuration(jobConf);
+  }
+
+  /**
+   * @return Configuration of the Hive Metastore; this is a standalone in-process metastore, not a daemon
+   */
+  public HiveConf getHiveConf() {
+    return new HiveConf(hiveConf);
+  }
+
+  /**
+   * @return Filesystem used by MiniMRCluster and MiniHBaseCluster
+   */
+  public FileSystem getFileSystem() {
+    try {
+      return FileSystem.get(jobConf);
+    } catch (IOException e) {
+      throw new IllegalStateException("Failed to get FileSystem", e);
+    }
+  }
+
+  /**
+   * @return Metastore client instance
+   */
+  public HiveMetaStoreClient getHiveMetaStoreClient() {
+    return hiveMetaStoreClient;
+  }
+
+  private void setupMRCluster() {
+    try {
+      final int jobTrackerPort = findFreePort();
+      final int taskTrackerPort = findFreePort();
+
+      if (jobConf == null)
+        jobConf = new JobConf();
+
+      jobConf.setInt("mapred.submit.replication", 1);
+      jobConf.set("yarn.scheduler.capacity.root.queues", "default");
+      jobConf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+      //conf.set("hadoop.job.history.location",new File(workDir).getAbsolutePath()+"/history");
+      System.setProperty("hadoop.log.dir", new File(workDir, "/logs").getAbsolutePath());
+
+      mrCluster = new MiniMRCluster(jobTrackerPort,
+        taskTrackerPort,
+        numTaskTrackers,
+        getFileSystem().getUri().toString(),
+        numTaskTrackers,
+        null,
+        null,
+        null,
+        jobConf);
+
+      jobConf = mrCluster.createJobConf();
+    } catch (IOException e) {
+      throw new IllegalStateException("Failed to Setup MR Cluster", e);
+    }
+  }
+
+  private void setupZookeeper() {
+    try {
+      zookeeperDir = new File(workDir, "zk").getAbsolutePath();
+      zookeeperPort = findFreePort();
+      zookeeperCluster = new MiniZooKeeperCluster();
+      zookeeperCluster.setDefaultClientPort(zookeeperPort);
+      zookeeperCluster.startup(new File(zookeeperDir));
+    } catch (Exception e) {
+      throw new IllegalStateException("Failed to Setup Zookeeper Cluster", e);
+    }
+  }
+
+  private void setupHBaseCluster() {
+    final int numRegionServers = 1;
+
+    try {
+      hbaseDir = new File(workDir, "hbase").getCanonicalPath();
+      hbaseDir = hbaseDir.replaceAll("\\\\", "/");
+      hbaseRoot = "file:///" + hbaseDir;
+
+      if (hbaseConf == null)
+        hbaseConf = HBaseConfiguration.create();
+
+      hbaseConf.set("hbase.rootdir", hbaseRoot);
+      hbaseConf.set("hbase.master", "local");
+      hbaseConf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zookeeperPort);
+      hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1");
+      hbaseConf.setInt("hbase.master.port", findFreePort());
+      hbaseConf.setInt("hbase.master.info.port", -1);
+      hbaseConf.setInt("hbase.regionserver.port", findFreePort());
+      hbaseConf.setInt("hbase.regionserver.info.port", -1);
+
+      hbaseCluster = new MiniHBaseCluster(hbaseConf, numRegionServers);
+      hbaseConf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort());
+      //opening the META table ensures that the cluster is running
+      new HTable(hbaseConf, HConstants.META_TABLE_NAME);
+    } catch (Exception e) {
+      throw new IllegalStateException("Failed to setup HBase Cluster", e);
+    }
+  }
+
+  private void setUpMetastore() throws Exception {
+    if (hiveConf == null)
+      hiveConf = new HiveConf(this.getClass());
+
+    //The default org.apache.hadoop.hive.ql.hooks.PreExecutePrinter hook
+    //is present only in the ql/test directory
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
+      "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true");
+    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(),
+      new File(workDir, "warehouse").toString());
+    //set where derby logs
+    File derbyLogFile = new File(workDir + "/derby.log");
+    derbyLogFile.createNewFile();
+    System.setProperty("derby.stream.error.file", derbyLogFile.getPath());
+
+
+//    Driver driver = new Driver(hiveConf);
+//    SessionState.start(new CliSessionState(hiveConf));
+
+    hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket server = new ServerSocket(0);
+    int port = server.getLocalPort();
+    server.close();
+    return port;
+  }
+
+  public static class Builder {
+    private File workDir;
+    private int numTaskTrackers = 1;
+    private JobConf jobConf;
+    private Configuration hbaseConf;
+    private HiveConf hiveConf;
+
+    private boolean miniMRClusterEnabled = true;
+    private boolean miniHBaseClusterEnabled = true;
+    private boolean miniHiveMetastoreEnabled = true;
+    private boolean miniZookeeperClusterEnabled = true;
+
+
+    private Builder(File workDir) {
+      this.workDir = workDir;
+    }
+
+    public Builder numTaskTrackers(int num) {
+      numTaskTrackers = num;
+      return this;
+    }
+
+    public Builder jobConf(JobConf jobConf) {
+      this.jobConf = jobConf;
+      return this;
+    }
+
+    public Builder hbaseConf(Configuration hbaseConf) {
+      this.hbaseConf = hbaseConf;
+      return this;
+    }
+
+    public Builder hiveConf(HiveConf hiveConf) {
+      this.hiveConf = hiveConf;
+      return this;
+    }
+
+    public Builder miniMRClusterEnabled(boolean enabled) {
+      this.miniMRClusterEnabled = enabled;
+      return this;
+    }
+
+    public Builder miniHBaseClusterEnabled(boolean enabled) {
+      this.miniHBaseClusterEnabled = enabled;
+      return this;
+    }
+
+    public Builder miniZookeeperClusterEnabled(boolean enabled) {
+      this.miniZookeeperClusterEnabled = enabled;
+      return this;
+    }
+
+    public Builder miniHiveMetastoreEnabled(boolean enabled) {
+      this.miniHiveMetastoreEnabled = enabled;
+      return this;
+    }
+
+
+    public ManyMiniCluster build() {
+      return new ManyMiniCluster(this);
+    }
+
+  }
+}
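
The Builder and lifecycle methods above compose as follows; a minimal sketch, assuming the
caller lives in the same org.apache.hive.hcatalog.hbase package (start() and stop() are
protected) and that the work directory path is illustrative:

    File workDir = new File("/tmp/manyminicluster-demo");  // illustrative path
    ManyMiniCluster cluster = ManyMiniCluster.create(workDir)
        .numTaskTrackers(1)
        .miniHBaseClusterEnabled(true)  // start() also forces the ZooKeeper mini cluster on
        .build();
    cluster.start();
    try {
      Configuration hbaseConf = cluster.getHBaseConf();
      HiveConf hiveConf = cluster.getHiveConf();
      // ... exercise HBase and the metastore against the mini clusters ...
    } finally {
      cluster.stop();
    }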

Added: hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java?rev=1587230&view=auto
==============================================================================
--- hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java (added)
+++ hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java Mon Apr 14 15:55:57 2014
@@ -0,0 +1,237 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.hbase;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/**
+ * Base class for HBase Tests which need a mini cluster instance
+ */
+public abstract class SkeletonHBaseTest {
+
+  protected static String TEST_DIR = "/tmp/build/test/data/";
+
+  protected final static String DEFAULT_CONTEXT_HANDLE = "default";
+
+  protected static Map<String, Context> contextMap = new HashMap<String, Context>();
+  protected static Set<String> tableNames = new HashSet<String>();
+
+  /**
+   * Allow tests to alter the default MiniCluster configuration.
+   * (must be set in a static initializer block, as all setup here is static)
+   */
+  protected static Configuration testConf = null;
+
+  protected void createTable(String tableName, String[] families) {
+    try {
+      HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+      HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+      for (String family : families) {
+        HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
+        tableDesc.addFamily(columnDescriptor);
+      }
+      admin.createTable(tableDesc);
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new IllegalStateException(e);
+    }
+
+  }
+
+  protected String newTableName(String prefix) {
+    String name = null;
+    int tries = 100;
+    do {
+      name = prefix + "_" + Math.abs(new Random().nextLong());
+    } while (tableNames.contains(name) && --tries > 0);
+    if (tableNames.contains(name))
+      throw new IllegalStateException("Couldn't find a unique table name, tableNames size: " + tableNames.size());
+    tableNames.add(name);
+    return name;
+  }
+
+
+  /**
+   * Start up an HBase cluster instance before the test suite runs.
+   */
+  @BeforeClass
+  public static void setup() {
+    if (!contextMap.containsKey(getContextHandle()))
+      contextMap.put(getContextHandle(), new Context(getContextHandle()));
+
+    contextMap.get(getContextHandle()).start();
+  }
+
+  /**
+   * Shut down the HBase cluster instance at the end of the test suite.
+   */
+  @AfterClass
+  public static void tearDown() {
+    contextMap.get(getContextHandle()).stop();
+  }
+
+  /**
+   * Override this with a different context handle if test suites are run simultaneously
+   * and ManyMiniCluster instances shouldn't be shared.
+   * @return the context handle used to look up this suite's shared Context
+   */
+  public static String getContextHandle() {
+    return DEFAULT_CONTEXT_HANDLE;
+  }
+
+  /**
+   * @return the working directory for a given test context (normally one per test suite)
+   */
+  public String getTestDir() {
+    return contextMap.get(getContextHandle()).getTestDir();
+  }
+
+  /**
+   * @return ManyMiniCluster instance
+   */
+  public ManyMiniCluster getCluster() {
+    return contextMap.get(getContextHandle()).getCluster();
+  }
+
+  /**
+   * @return configuration of MiniHBaseCluster
+   */
+  public Configuration getHbaseConf() {
+    return contextMap.get(getContextHandle()).getHbaseConf();
+  }
+
+  /**
+   * @return configuration of MiniMRCluster
+   */
+  public Configuration getJobConf() {
+    return contextMap.get(getContextHandle()).getJobConf();
+  }
+
+  /**
+   * @return configuration of Hive Metastore
+   */
+  public HiveConf getHiveConf() {
+    return contextMap.get(getContextHandle()).getHiveConf();
+  }
+
+  /**
+   * @return filesystem used by ManyMiniCluster daemons
+   */
+  public FileSystem getFileSystem() {
+    return contextMap.get(getContextHandle()).getFileSystem();
+  }
+
+  /**
+   * Encapsulates the cluster context used by a single test suite, or shared
+   * across test suites when multi-threaded testing is turned on.
+   */
+  public static class Context {
+    protected String testDir;
+    protected ManyMiniCluster cluster;
+
+    protected Configuration hbaseConf;
+    protected Configuration jobConf;
+    protected HiveConf hiveConf;
+
+    protected FileSystem fileSystem;
+
+    protected int usageCount = 0;
+
+    public Context(String handle) {
+      testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getPath();
+      System.out.println("Cluster work directory: " + testDir);
+    }
+
+    public void start() {
+      if (usageCount++ == 0) {
+        ManyMiniCluster.Builder b = ManyMiniCluster.create(new File(testDir));
+        if (testConf != null) {
+          b.hbaseConf(HBaseConfiguration.create(testConf));
+        }
+        cluster = b.build();
+        cluster.start();
+        this.hbaseConf = cluster.getHBaseConf();
+        jobConf = cluster.getJobConf();
+        fileSystem = cluster.getFileSystem();
+        hiveConf = cluster.getHiveConf();
+      }
+    }
+
+    public void stop() {
+      if (--usageCount == 0) {
+        try {
+          cluster.stop();
+          cluster = null;
+        } finally {
+          System.out.println("Trying to cleanup: " + testDir);
+          try {
+            FileSystem fs = FileSystem.get(jobConf);
+            fs.delete(new Path(testDir), true);
+          } catch (IOException e) {
+            throw new IllegalStateException("Failed to cleanup test dir", e);
+          }
+
+        }
+      }
+    }
+
+    public String getTestDir() {
+      return testDir;
+    }
+
+    public ManyMiniCluster getCluster() {
+      return cluster;
+    }
+
+    public Configuration getHbaseConf() {
+      return hbaseConf;
+    }
+
+    public Configuration getJobConf() {
+      return jobConf;
+    }
+
+    public HiveConf getHiveConf() {
+      return hiveConf;
+    }
+
+    public FileSystem getFileSystem() {
+      return fileSystem;
+    }
+  }
+
+}
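
A concrete suite built on this skeleton then reduces to its test bodies; a minimal sketch,
with hypothetical class, table and column-family names:

    public class TestMyHBaseFeature extends SkeletonHBaseTest {
      @org.junit.Test
      public void testTableRoundTrip() throws Exception {
        // newTableName() avoids collisions between suites sharing a Context
        String table = newTableName("my_feature");
        createTable(table, new String[]{"info"});
        // the admin client talks to the shared MiniHBaseCluster
        HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
        org.junit.Assert.assertTrue(admin.tableExists(table));
      }
    }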

Modified: hive/trunk/packaging/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/packaging/pom.xml?rev=1587230&r1=1587229&r2=1587230&view=diff
==============================================================================
--- hive/trunk/packaging/pom.xml (original)
+++ hive/trunk/packaging/pom.xml Mon Apr 14 15:55:57 2014
@@ -138,11 +138,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
-      <artifactId>hive-hcatalog-hbase-storage-handler</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive.hcatalog</groupId>
       <artifactId>hive-hcatalog-core</artifactId>
       <version>${project.version}</version>
     </dependency>