You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/04 04:49:50 UTC
svn commit: r1622396 [1/8] - in /hive/branches/cbo: ./
accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/
beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/
common/src/java/or...
Author: gunther
Date: Thu Sep 4 02:49:46 2014
New Revision: 1622396
URL: http://svn.apache.org/r1622396
Log:
Merge latest trunk into cbo branch (Gunther Hagleitner)
Added:
hive/branches/cbo/bin/ext/hiveburninclient.sh
- copied unchanged from r1622394, hive/trunk/bin/ext/hiveburninclient.sh
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
- copied unchanged from r1622394, hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java
- copied unchanged from r1622394, hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
- copied unchanged from r1622394, hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerWrapper.java
- copied unchanged from r1622394, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerWrapper.java
hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java
- copied unchanged from r1622394, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java
hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestStringDictionary.java
- copied unchanged from r1622394, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestStringDictionary.java
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_grant_group.q
- copied unchanged from r1622394, hive/trunk/ql/src/test/queries/clientnegative/authorization_grant_group.q
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_role_case.q
- copied unchanged from r1622394, hive/trunk/ql/src/test/queries/clientnegative/authorization_role_case.q
hive/branches/cbo/ql/src/test/queries/clientpositive/partition_char.q
- copied unchanged from r1622394, hive/trunk/ql/src/test/queries/clientpositive/partition_char.q
hive/branches/cbo/ql/src/test/queries/clientpositive/temp_table_display_colstats_tbllvl.q
- copied unchanged from r1622394, hive/trunk/ql/src/test/queries/clientpositive/temp_table_display_colstats_tbllvl.q
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_grant_group.q.out
- copied unchanged from r1622394, hive/trunk/ql/src/test/results/clientnegative/authorization_grant_group.q.out
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_role_case.q.out
- copied unchanged from r1622394, hive/trunk/ql/src/test/results/clientnegative/authorization_role_case.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/partition_char.q.out
- copied unchanged from r1622394, hive/trunk/ql/src/test/results/clientpositive/partition_char.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
- copied unchanged from r1622394, hive/trunk/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
- copied unchanged from r1622394, hive/trunk/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
hive/branches/cbo/testutils/src/java/org/apache/hive/testutils/jdbc/
- copied from r1622394, hive/trunk/testutils/src/java/org/apache/hive/testutils/jdbc/
Removed:
hive/branches/cbo/ql/src/test/queries/clientnegative/temp_table_column_stats.q
hive/branches/cbo/ql/src/test/results/clientnegative/temp_table_column_stats.q.out
Modified:
hive/branches/cbo/ (props changed)
hive/branches/cbo/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java
hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/AbstractCommandHandler.java
hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java
hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/CommandHandler.java
hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/ReflectiveCommandHandler.java
hive/branches/cbo/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
hive/branches/cbo/bin/hive
hive/branches/cbo/checkstyle/checkstyle.xml
hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/Validator.java
hive/branches/cbo/hbase-handler/pom.xml (contents, props changed)
hive/branches/cbo/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
hive/branches/cbo/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/branches/cbo/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
hive/branches/cbo/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java
hive/branches/cbo/itests/src/test/resources/testconfiguration.properties
hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
hive/branches/cbo/pom.xml
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionTableFunctionDescription.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionDescription.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IdentityExpression.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeTask.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ColStatistics.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLpad.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRpad.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java
hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_create.q
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_drop.q
hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_grant_public.q
hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_revoke_public.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_1.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_5.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_grant_public_role.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_role_grant2.q
hive/branches/cbo/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_create.q.out
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_drop.q.out
hive/branches/cbo/ql/src/test/results/clientnegative/authorize_grant_public.q.out
hive/branches/cbo/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_1.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_5.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_role_grant2.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/show_conf.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out
hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java
hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java
hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
hive/branches/cbo/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/SaslQOP.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
hive/branches/cbo/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/OperationState.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/Operation.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
hive/branches/cbo/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
hive/branches/cbo/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
Propchange: hive/branches/cbo/
------------------------------------------------------------------------------
Merged /hive/trunk:r1621024-1622394
Modified: hive/branches/cbo/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java (original)
+++ hive/branches/cbo/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java Thu Sep 4 02:49:46 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector;
@@ -57,7 +58,7 @@ import org.slf4j.LoggerFactory;
*
*/
public class AccumuloRangeGenerator implements NodeProcessor {
- private static final Logger log = LoggerFactory.getLogger(AccumuloRangeGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AccumuloRangeGenerator.class);
private final AccumuloPredicateHandler predicateHandler;
private final HiveAccumuloRowIdColumnMapping rowIdMapping;
@@ -162,7 +163,7 @@ public class AccumuloRangeGenerator impl
andRanges = newRanges;
}
} else {
- log.error("Expected Range from {} but got {}", nd, nodeOutput);
+ LOG.error("Expected Range from {} but got {}", nd, nodeOutput);
throw new IllegalArgumentException("Expected Range but got "
+ nodeOutput.getClass().getName());
}
@@ -181,7 +182,7 @@ public class AccumuloRangeGenerator impl
List<Range> childRanges = (List<Range>) nodeOutput;
orRanges.addAll(childRanges);
} else {
- log.error("Expected Range from " + nd + " but got " + nodeOutput);
+ LOG.error("Expected Range from {} but got {}", nd, nodeOutput);
throw new IllegalArgumentException("Expected Range but got "
+ nodeOutput.getClass().getName());
}
@@ -324,27 +325,9 @@ public class AccumuloRangeGenerator impl
*/
protected Text getBinaryValue(ConstantObjectInspector objInspector) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
- if (objInspector instanceof WritableConstantBooleanObjectInspector) {
+ if (objInspector instanceof PrimitiveObjectInspector) {
LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantBooleanObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantByteObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantByteObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantShortObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantShortObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantIntObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantIntObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantLongObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantLongObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantDoubleObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantDoubleObjectInspector) objInspector);
- } else if (objInspector instanceof WritableConstantFloatObjectInspector) {
- LazyUtils.writePrimitive(out, objInspector.getWritableConstantValue(),
- (WritableConstantDoubleObjectInspector) objInspector);
+ (PrimitiveObjectInspector) objInspector);
} else {
return getUtf8Value(objInspector);
}
Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/AbstractCommandHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/AbstractCommandHandler.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/AbstractCommandHandler.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/AbstractCommandHandler.java Thu Sep 4 02:49:46 2014
@@ -40,6 +40,7 @@ public abstract class AbstractCommandHan
private final String helpText;
private Completor[] parameterCompletors = new Completor[0];
+ protected transient Throwable lastException;
public AbstractCommandHandler(BeeLine beeLine, String[] names, String helpText,
Completor[] completors) {
@@ -101,4 +102,9 @@ public abstract class AbstractCommandHan
public Completor[] getParameterCompletors() {
return parameterCompletors;
}
+
+ @Override
+ public Throwable getLastException() {
+ return lastException;
+ }
}
Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java Thu Sep 4 02:49:46 2014
@@ -57,7 +57,6 @@ import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
@@ -618,9 +617,9 @@ public class BeeLine implements Closeabl
}
- boolean initArgs(String[] args) {
- List<String> commands = new LinkedList<String>();
- List<String> files = new LinkedList<String>();
+ int initArgs(String[] args) {
+ List<String> commands = Collections.emptyList();
+ List<String> files = Collections.emptyList();
CommandLine cl;
BeelineParser beelineParser;
@@ -630,7 +629,8 @@ public class BeeLine implements Closeabl
cl = beelineParser.parse(options, args);
} catch (ParseException e1) {
output(e1.getMessage());
- return false;
+ usage();
+ return -1;
}
String driver = null, user = null, pass = null, url = null;
@@ -638,8 +638,8 @@ public class BeeLine implements Closeabl
if (cl.hasOption("help")) {
- // Return false here, so usage will be printed.
- return false;
+ usage();
+ return 0;
}
Properties hiveVars = cl.getOptionProperties("hivevar");
@@ -690,7 +690,8 @@ public class BeeLine implements Closeabl
dispatch("!properties " + i.next());
}
- if (commands.size() > 0) {
+ int code = 0;
+ if (!commands.isEmpty()) {
// for single command execute, disable color
getOpts().setColor(false);
getOpts().setHeaderInterval(-1);
@@ -698,11 +699,13 @@ public class BeeLine implements Closeabl
for (Iterator<String> i = commands.iterator(); i.hasNext();) {
String command = i.next().toString();
debug(loc("executing-command", command));
- dispatch(command);
+ if (!dispatch(command)) {
+ code++;
+ }
}
exit = true; // execute and exit
}
- return true;
+ return code;
}
@@ -720,9 +723,9 @@ public class BeeLine implements Closeabl
}
try {
- if (!initArgs(args)) {
- usage();
- return ERRNO_ARGS;
+ int code = initArgs(args);
+ if (code != 0) {
+ return code;
}
if (getOpts().getScriptFile() != null) {
Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/CommandHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/CommandHandler.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/CommandHandler.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/CommandHandler.java Thu Sep 4 02:49:46 2014
@@ -74,4 +74,10 @@ interface CommandHandler {
* Returns the completors that can handle parameters.
*/
public Completor[] getParameterCompletors();
+
+ /**
+ * Returns exception thrown for last command
+ * @return
+ */
+ public Throwable getLastException();
}
\ No newline at end of file
Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java Thu Sep 4 02:49:46 2014
@@ -287,7 +287,7 @@ public class HiveSchemaTool {
}
} catch (IOException e) {
throw new HiveMetaException("Schema initialization FAILED!" +
- " Metastore state would be inconsistent !!", e);
+ " Metastore state would be inconsistent !!", e);
}
}
Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/ReflectiveCommandHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/ReflectiveCommandHandler.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/ReflectiveCommandHandler.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/ReflectiveCommandHandler.java Thu Sep 4 02:49:46 2014
@@ -40,6 +40,7 @@ public class ReflectiveCommandHandler ex
}
public boolean execute(String line) {
+ lastException = null;
try {
Object ob = beeLine.getCommands().getClass().getMethod(getName(),
new Class[] {String.class})
@@ -47,6 +48,7 @@ public class ReflectiveCommandHandler ex
return ob != null && ob instanceof Boolean
&& ((Boolean) ob).booleanValue();
} catch (Throwable e) {
+ lastException = e;
return beeLine.error(e);
}
}
Modified: hive/branches/cbo/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java (original)
+++ hive/branches/cbo/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java Thu Sep 4 02:49:46 2014
@@ -56,7 +56,7 @@ public class TestBeelineArgParsing {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-a", "authType"};
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getAuthType().equals("authType"));
}
@@ -69,7 +69,7 @@ public class TestBeelineArgParsing {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-u", "url2", "-n", "name",
"-p", "password", "-d", "driver"};
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
}
@@ -78,7 +78,7 @@ public class TestBeelineArgParsing {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-e", "select1", "-e", "select2"};
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.queries.contains("select1"));
Assert.assertTrue(bl.queries.contains("select2"));
@@ -93,7 +93,7 @@ public class TestBeelineArgParsing {
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "--hiveconf", "a=avalue", "--hiveconf", "b=bvalue",
"--hivevar", "c=cvalue", "--hivevar", "d=dvalue"};
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getHiveConfVariables().get("a").equals("avalue"));
Assert.assertTrue(bl.getOpts().getHiveConfVariables().get("b").equals("bvalue"));
@@ -107,7 +107,7 @@ public class TestBeelineArgParsing {
String args[] =
new String[] { "-u", "url", "-n", "name", "-p", "password", "-d", "driver",
"--autoCommit=true", "--verbose", "--truncateTable" };
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getAutoCommit());
Assert.assertTrue(bl.getOpts().getVerbose());
@@ -122,7 +122,7 @@ public class TestBeelineArgParsing {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-f", "myscript"};
- Assert.assertTrue(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getScriptFile().equals("myscript"));
}
@@ -134,7 +134,7 @@ public class TestBeelineArgParsing {
public void testHelp() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"--help"};
- Assert.assertFalse(bl.initArgs(args));
+ Assert.assertEquals(0, bl.initArgs(args));
}
/**
@@ -144,7 +144,7 @@ public class TestBeelineArgParsing {
public void testUnmatchedArgs() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n"};
- Assert.assertFalse(bl.initArgs(args));
+ Assert.assertEquals(-1, bl.initArgs(args));
}
}
Modified: hive/branches/cbo/bin/hive
URL: http://svn.apache.org/viewvc/hive/branches/cbo/bin/hive?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/bin/hive (original)
+++ hive/branches/cbo/bin/hive Thu Sep 4 02:49:46 2014
@@ -99,7 +99,8 @@ done
# add the auxillary jars such as serdes
if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
- for f in ${HIVE_AUX_JARS_PATH}/*.jar; do
+ hive_aux_jars_abspath=`cd ${HIVE_AUX_JARS_PATH} && pwd`
+ for f in $hive_aux_jars_abspath/*.jar; do
if [[ ! -f $f ]]; then
continue;
fi
Modified: hive/branches/cbo/checkstyle/checkstyle.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/checkstyle/checkstyle.xml?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/checkstyle/checkstyle.xml (original)
+++ hive/branches/cbo/checkstyle/checkstyle.xml Thu Sep 4 02:49:46 2014
@@ -65,7 +65,7 @@
<module name="Translation"/>
<module name="Header">
- <property name="headerFile" value="${basedir}/checkstyle/asf.header"/>
+ <property name="headerFile" value="checkstyle/asf.header"/>
</module>
<!-- Maximum file line length -->
@@ -78,7 +78,7 @@
<!-- List of files to ignore -->
<module name="SuppressionFilter">
- <property name="file" value="${basedir}/checkstyle/suppressions.xml"/>
+ <property name="file" value="checkstyle/suppressions.xml"/>
</module>
<!-- Ignore JavaCC/JJTree files -->
@@ -178,7 +178,6 @@
<!-- Checks for common coding problems -->
<!-- See http://checkstyle.sf.net/config_coding.html -->
<!-- module name="AvoidInlineConditionals"/-->
- <module name="DoubleCheckedLocking"/>
<module name="EmptyStatement"/>
<module name="EqualsAvoidNull"/>
<module name="EqualsHashCode"/>
Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Thu Sep 4 02:49:46 2014
@@ -24,7 +24,6 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.security.AccessControlException;
import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
@@ -34,12 +33,10 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
@@ -628,4 +625,62 @@ public final class FileUtils {
//Once equality has been added in HDFS-4321, we should make use of it
return fs1.getUri().equals(fs2.getUri());
}
+
+ /**
+ * Checks if delete can be performed on given path by given user.
+ * If file does not exist it just returns without throwing an Exception
+ * @param path
+ * @param conf
+ * @param user
+ * @throws AccessControlException
+ * @throws InterruptedException
+ * @throws Exception
+ */
+ public static void checkDeletePermission(Path path, Configuration conf, String user)
+ throws AccessControlException, InterruptedException, Exception {
+ // This requires ability to delete the given path.
+ // The following 2 conditions should be satisfied for this-
+ // 1. Write permissions on parent dir
+ // 2. If sticky bit is set on parent dir then one of following should be
+ // true
+ // a. User is owner of the current dir/file
+ // b. User is owner of the parent dir
+ // Super users are also allowed to drop the file, but there is no good way of checking
+ // if a user is a super user. Also super users running hive queries is not a common
+ // use case. super users can also do a chown to be able to drop the file
+
+ final FileSystem fs = path.getFileSystem(conf);
+ if (!fs.exists(path)) {
+ // no file/dir to be deleted
+ return;
+ }
+ Path parPath = path.getParent();
+ // check user has write permissions on the parent dir
+ FileStatus stat = fs.getFileStatus(path);
+ FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);
+
+ // check if sticky bit is set on the parent dir
+ FileStatus parStatus = fs.getFileStatus(parPath);
+ if (!parStatus.getPermission().getStickyBit()) {
+ // no sticky bit, so write permission on parent dir is sufficient
+ // no further checks needed
+ return;
+ }
+
+ // check if user is owner of parent dir
+ if (parStatus.getOwner().equals(user)) {
+ return;
+ }
+
+ // check if user is owner of current dir/file
+ FileStatus childStatus = fs.getFileStatus(path);
+ if (childStatus.getOwner().equals(user)) {
+ return;
+ }
+ String msg = String.format("Permission Denied: User %s can't delete %s because sticky bit is"
+ + " set on the parent dir and user does not own this file or its parent", user, path);
+ throw new IOException(msg);
+
+ }
+
}
Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java Thu Sep 4 02:49:46 2014
@@ -116,8 +116,7 @@ public class StatsSetupConst {
/**
* @return List of all supported statistics
*/
- public static final String[] supportedStats = new String[]
- {NUM_FILES,ROW_COUNT,TOTAL_SIZE,RAW_DATA_SIZE};
+ public static final String[] supportedStats = {NUM_FILES,ROW_COUNT,TOTAL_SIZE,RAW_DATA_SIZE};
/**
* @return List of all statistics that need to be collected during query execution. These are
@@ -142,8 +141,8 @@ public class StatsSetupConst {
public static final String FALSE = "false";
- public static boolean areStatsUptoDate(Map<String,String> params) {
- String statsAcc = params.get(COLUMN_STATS_ACCURATE);
- return statsAcc == null ? false : statsAcc.equals(TRUE);
+ public static boolean areStatsUptoDate(Map<String, String> params) {
+ String statsAcc = params.get(COLUMN_STATS_ACCURATE);
+ return statsAcc == null ? false : statsAcc.equals(TRUE);
}
}
Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Sep 4 02:49:46 2014
@@ -31,6 +31,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
+import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -43,7 +44,9 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.conf.Validator.PatternSet;
import org.apache.hadoop.hive.conf.Validator.RangeValidator;
+import org.apache.hadoop.hive.conf.Validator.RatioValidator;
import org.apache.hadoop.hive.conf.Validator.StringSet;
+import org.apache.hadoop.hive.conf.Validator.TimeValidator;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
@@ -352,9 +355,11 @@ public class HiveConf extends Configurat
METASTORETHRIFTFAILURERETRIES("hive.metastore.failure.retries", 1,
"Number of retries upon failure of Thrift metastore calls"),
- METASTORE_CLIENT_CONNECT_RETRY_DELAY("hive.metastore.client.connect.retry.delay", 1,
+ METASTORE_CLIENT_CONNECT_RETRY_DELAY("hive.metastore.client.connect.retry.delay", "1s",
+ new TimeValidator(TimeUnit.SECONDS),
"Number of seconds for the client to wait between consecutive connection attempts"),
- METASTORE_CLIENT_SOCKET_TIMEOUT("hive.metastore.client.socket.timeout", 600,
+ METASTORE_CLIENT_SOCKET_TIMEOUT("hive.metastore.client.socket.timeout", "600s",
+ new TimeValidator(TimeUnit.SECONDS),
"MetaStore Client socket timeout in seconds"),
METASTOREPWD("javax.jdo.option.ConnectionPassword", "mine",
"password to use against metastore database"),
@@ -367,9 +372,9 @@ public class HiveConf extends Configurat
"JDBC connect string for a JDBC metastore"),
HMSHANDLERATTEMPTS("hive.hmshandler.retry.attempts", 1,
- "The number of times to retry a HMSHandler call if there were a connection error"),
- HMSHANDLERINTERVAL("hive.hmshandler.retry.interval", 1000,
- "The number of milliseconds between HMSHandler retry attempts"),
+ "The number of times to retry a HMSHandler call if there were a connection error."),
+ HMSHANDLERINTERVAL("hive.hmshandler.retry.interval", "1000ms",
+ new TimeValidator(TimeUnit.MILLISECONDS), "The time between HMSHandler retry attempts on failure."),
HMSHANDLERFORCERELOADCONF("hive.hmshandler.force.reload.conf", false,
"Whether to force reloading of the HMSHandler configuration (including\n" +
"the connection URL, before the next metastore query that accesses the\n" +
@@ -464,10 +469,12 @@ public class HiveConf extends Configurat
"for operations like drop-partition (disallow the drop-partition if the user in\n" +
"question doesn't have permissions to delete the corresponding directory\n" +
"on the storage)."),
- METASTORE_EVENT_CLEAN_FREQ("hive.metastore.event.clean.freq", 0L,
- "Frequency at which timer task runs to purge expired events in metastore(in seconds)."),
- METASTORE_EVENT_EXPIRY_DURATION("hive.metastore.event.expiry.duration", 0L,
- "Duration after which events expire from events table (in seconds)"),
+ METASTORE_EVENT_CLEAN_FREQ("hive.metastore.event.clean.freq", "0s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Frequency at which timer task runs to purge expired events in metastore."),
+ METASTORE_EVENT_EXPIRY_DURATION("hive.metastore.event.expiry.duration", "0s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Duration after which events expire from events table"),
METASTORE_EXECUTE_SET_UGI("hive.metastore.execute.setugi", true,
"In unsecure mode, setting this property to true will cause the metastore to execute DFS operations using \n" +
"the client's reported user and group permissions. Note that this property must be set on \n" +
@@ -579,8 +586,9 @@ public class HiveConf extends Configurat
HIVE_CURRENT_DATABASE("hive.current.database", "", "Database name used by current session. Internal usage only.", true),
// for hive script operator
- HIVES_AUTO_PROGRESS_TIMEOUT("hive.auto.progress.timeout", 0,
- "How long to run autoprogressor for the script/UDTF operators (in seconds).\n" +
+ HIVES_AUTO_PROGRESS_TIMEOUT("hive.auto.progress.timeout", "0s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "How long to run autoprogressor for the script/UDTF operators.\n" +
"Set to 0 for forever."),
HIVETABLENAME("hive.table.name", "", ""),
HIVEPARTITIONNAME("hive.partition.name", "", ""),
@@ -692,10 +700,9 @@ public class HiveConf extends Configurat
"because this may prevent TaskTracker from killing tasks with infinite loops."),
HIVEDEFAULTFILEFORMAT("hive.default.fileformat", "TextFile", new StringSet("TextFile", "SequenceFile", "RCfile", "ORC"),
- "Default file format for CREATE TABLE statement. \n" +
- "Options are TextFile, SequenceFile, RCfile and ORC. Users can explicitly override it by CREATE TABLE ... STORED AS [FORMAT]"),
+ "Default file format for CREATE TABLE statement. Users can explicitly override it by CREATE TABLE ... STORED AS [FORMAT]"),
HIVEQUERYRESULTFILEFORMAT("hive.query.result.fileformat", "TextFile", new StringSet("TextFile", "SequenceFile", "RCfile"),
- "Default file format for storing result of the query. Allows TextFile, SequenceFile and RCfile"),
+ "Default file format for storing result of the query."),
HIVECHECKFILEFORMAT("hive.fileformat.check", true, "Whether to check file format or not when loading data files"),
// default serde for rcfile
@@ -722,8 +729,9 @@ public class HiveConf extends Configurat
"Whether to log the plan's progress every time a job's progress is checked.\n" +
"These logs are written to the location specified by hive.querylog.location"),
- HIVE_LOG_INCREMENTAL_PLAN_PROGRESS_INTERVAL("hive.querylog.plan.progress.interval", 60000L,
- "The interval to wait between logging the plan's progress in milliseconds.\n" +
+ HIVE_LOG_INCREMENTAL_PLAN_PROGRESS_INTERVAL("hive.querylog.plan.progress.interval", "60000ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "The interval to wait between logging the plan's progress.\n" +
"If there is a whole number percentage change in the progress of the mappers or the reducers,\n" +
"the progress is logged regardless of this value.\n" +
"The actual interval will be the ceiling of (this value divided by the value of\n" +
@@ -802,7 +810,7 @@ public class HiveConf extends Configurat
" config (hive.exec.orc.block.padding.tolerance)."),
HIVEMERGEINPUTFORMATSTRIPELEVEL("hive.merge.input.format.stripe.level",
"org.apache.hadoop.hive.ql.io.orc.OrcFileStripeMergeInputFormat",
- "Input file format to use for ORC stripe level merging (for internal use only)"),
+ "Input file format to use for ORC stripe level merging (for internal use only)"),
HIVEMERGECURRENTJOBHASDYNAMICPARTITIONS(
"hive.merge.current.job.has.dynamic.partitions", false, ""),
@@ -831,6 +839,10 @@ public class HiveConf extends Configurat
"If the number of keys in a dictionary is greater than this fraction of the total number of\n" +
"non-null rows, turn off dictionary encoding. Use 1 to always use dictionary encoding."),
HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE("hive.exec.orc.default.row.index.stride", 10000, "Define the default ORC index stride"),
+ HIVE_ORC_ROW_INDEX_STRIDE_DICTIONARY_CHECK("hive.orc.row.index.stride.dictionary.check", true,
+ "If enabled dictionary check will happen after first row index stride (default 10000 rows)\n" +
+ "else dictionary check will happen before writing first stripe. In both cases, the decision\n" +
+ "to use dictionary or not will be retained thereafter."),
HIVE_ORC_DEFAULT_BUFFER_SIZE("hive.exec.orc.default.buffer.size", 256 * 1024, "Define the default ORC buffer size"),
HIVE_ORC_DEFAULT_BLOCK_PADDING("hive.exec.orc.default.block.padding", true, "Define the default block padding"),
HIVE_ORC_BLOCK_PADDING_TOLERANCE("hive.exec.orc.block.padding.tolerance", 0.05f,
@@ -843,8 +855,7 @@ public class HiveConf extends Configurat
HIVE_ORC_ENCODING_STRATEGY("hive.exec.orc.encoding.strategy", "SPEED", new StringSet("SPEED", "COMPRESSION"),
"Define the encoding strategy to use while writing data. Changing this will\n" +
"only affect the light weight encoding for integers. This flag will not\n" +
- "change the compression level of higher level compression codec (like ZLIB).\n" +
- "Possible options are SPEED and COMPRESSION."),
+ "change the compression level of higher level compression codec (like ZLIB)."),
HIVE_ORC_INCLUDE_FILE_FOOTER_IN_SPLITS("hive.orc.splits.include.file.footer", false,
"If turned on splits generated by orc will include metadata about the stripes in the file. This\n" +
@@ -1032,9 +1043,11 @@ public class HiveConf extends Configurat
"When enabled dynamic partitioning column will be globally sorted.\n" +
"This way we can keep only one record writer open for each partition value\n" +
"in the reducer thereby reducing the memory pressure on reducers."),
- HIVESAMPLINGFORORDERBY("hive.optimize.sampling.orderby", false, ""),
- HIVESAMPLINGNUMBERFORORDERBY("hive.optimize.sampling.orderby.number", 1000, ""),
- HIVESAMPLINGPERCENTFORORDERBY("hive.optimize.sampling.orderby.percent", 0.1f, ""),
+
+ HIVESAMPLINGFORORDERBY("hive.optimize.sampling.orderby", false, "Uses sampling on order-by clause for parallel execution."),
+ HIVESAMPLINGNUMBERFORORDERBY("hive.optimize.sampling.orderby.number", 1000, "Total number of samples to be obtained."),
+ HIVESAMPLINGPERCENTFORORDERBY("hive.optimize.sampling.orderby.percent", 0.1f, new RatioValidator(),
+ "Probability with which a row will be chosen."),
// whether to optimize union followed by select followed by filesink
// It creates sub-directories in the final output, so should not be turned on in systems
@@ -1101,16 +1114,17 @@ public class HiveConf extends Configurat
"The Java class (implementing the StatsPublisher interface) that is used by default if hive.stats.dbclass is custom type."),
HIVE_STATS_DEFAULT_AGGREGATOR("hive.stats.default.aggregator", "",
"The Java class (implementing the StatsAggregator interface) that is used by default if hive.stats.dbclass is custom type."),
- HIVE_STATS_JDBC_TIMEOUT("hive.stats.jdbc.timeout", 30,
- "Timeout value (number of seconds) used by JDBC connection and statements."),
+ HIVE_STATS_JDBC_TIMEOUT("hive.stats.jdbc.timeout", "30s", new TimeValidator(TimeUnit.SECONDS),
+ "Timeout value used by JDBC connection and statements."),
HIVE_STATS_ATOMIC("hive.stats.atomic", false,
"whether to update metastore stats only if all stats are available"),
HIVE_STATS_RETRIES_MAX("hive.stats.retries.max", 0,
"Maximum number of retries when stats publisher/aggregator got an exception updating intermediate database. \n" +
"Default is no tries on failures."),
- HIVE_STATS_RETRIES_WAIT("hive.stats.retries.wait", 3000,
- "The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by " +
- "baseWindow * failures baseWindow * (failure 1) * (random number between [0.0,1.0])."),
+ HIVE_STATS_RETRIES_WAIT("hive.stats.retries.wait", "3000ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "The base waiting window before the next retry. The actual wait time is calculated by " +
+ "baseWindow * failures baseWindow * (failure + 1) * (random number between [0.0,1.0])."),
HIVE_STATS_COLLECT_RAWDATASIZE("hive.stats.collect.rawdatasize", true,
"should the raw data size be collected when analyzing tables"),
CLIENT_STATS_COUNTERS("hive.client.stats.counters", "",
@@ -1220,8 +1234,9 @@ public class HiveConf extends Configurat
"The number of times you want to try to get all the locks"),
HIVE_UNLOCK_NUMRETRIES("hive.unlock.numretries", 10,
"The number of times you want to retry to do one unlock"),
- HIVE_LOCK_SLEEP_BETWEEN_RETRIES("hive.lock.sleep.between.retries", 60,
- "The sleep time (in seconds) between various retries"),
+ HIVE_LOCK_SLEEP_BETWEEN_RETRIES("hive.lock.sleep.between.retries", "60s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "The sleep time between various retries"),
HIVE_LOCK_MAPRED_ONLY("hive.lock.mapred.only.operation", false,
"This param is to control whether or not only do lock on queries\n" +
"that need to execute at least one mapred job."),
@@ -1241,8 +1256,8 @@ public class HiveConf extends Configurat
// Transactions
HIVE_TXN_MANAGER("hive.txn.manager",
"org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager", ""),
- HIVE_TXN_TIMEOUT("hive.txn.timeout", 300,
- "time after which transactions are declared aborted if the client has not sent a heartbeat, in seconds."),
+ HIVE_TXN_TIMEOUT("hive.txn.timeout", "300s", new TimeValidator(TimeUnit.SECONDS),
+ "time after which transactions are declared aborted if the client has not sent a heartbeat."),
HIVE_TXN_MAX_OPEN_BATCH("hive.txn.max.open.batch", 1000,
"Maximum number of transactions that can be fetched in one call to open_txns().\n" +
@@ -1256,12 +1271,14 @@ public class HiveConf extends Configurat
HIVE_COMPACTOR_WORKER_THREADS("hive.compactor.worker.threads", 0,
"Number of compactor worker threads to run on this metastore instance."),
- HIVE_COMPACTOR_WORKER_TIMEOUT("hive.compactor.worker.timeout", 86400L,
- "Time in seconds, before a given compaction in working state is declared a failure\n" +
+ HIVE_COMPACTOR_WORKER_TIMEOUT("hive.compactor.worker.timeout", "86400s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Time before a given compaction in working state is declared a failure\n" +
"and returned to the initiated state."),
- HIVE_COMPACTOR_CHECK_INTERVAL("hive.compactor.check.interval", 300L,
- "Time in seconds between checks to see if any partitions need compacted.\n" +
+ HIVE_COMPACTOR_CHECK_INTERVAL("hive.compactor.check.interval", "300s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Time between checks to see if any partitions need compacted.\n" +
"This should be kept high because each check for compaction requires many calls against the NameNode."),
HIVE_COMPACTOR_DELTA_NUM_THRESHOLD("hive.compactor.delta.num.threshold", 10,
@@ -1298,7 +1315,7 @@ public class HiveConf extends Configurat
"Currently the query should be single sourced not having any subquery and should not have\n" +
"any aggregations or distincts (which incurs RS), lateral views and joins.\n" +
"1. minimal : SELECT STAR, FILTER on partition columns, LIMIT only\n" +
- "2. more : SELECT, FILTER, LIMIT only (support TABLESAMPLE and virtual columns)\n"
+ "2. more : SELECT, FILTER, LIMIT only (support TABLESAMPLE and virtual columns)"
),
HIVEFETCHTASKCONVERSIONTHRESHOLD("hive.fetch.task.conversion.threshold", 1073741824L,
"Input threshold for applying hive.fetch.task.conversion. If target table is native, input length\n" +
@@ -1470,12 +1487,12 @@ public class HiveConf extends Configurat
"table. From 0.12 onwards, they are displayed separately. This flag will let you\n" +
"get old behavior, if desired. See, test-case in patch for HIVE-6689."),
- HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L, new RangeValidator(0L, Long.MAX_VALUE),
+ HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L, new RangeValidator(0L, null),
"This number of times HiveServer2 will attempt to start before exiting, sleeping 60 seconds between retries. \n" +
"The default of 30 will keep trying for 30 minutes."),
HIVE_SERVER2_TRANSPORT_MODE("hive.server2.transport.mode", "binary", new StringSet("binary", "http"),
- "Server transport mode. \"binary\" or \"http\""),
+ "Transport mode of HiveServer2."),
// http (over thrift) transport settings
HIVE_SERVER2_THRIFT_HTTP_PORT("hive.server2.thrift.http.port", 10001,
@@ -1486,11 +1503,13 @@ public class HiveConf extends Configurat
"Minimum number of worker threads when in HTTP mode."),
HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads", 500,
"Maximum number of worker threads when in HTTP mode."),
- HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time", 1800000,
- "Maximum idle time in milliseconds for a connection on the server when in HTTP mode."),
- HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME("hive.server2.thrift.http.worker.keepalive.time", 60,
- "Keepalive time (in seconds) for an idle http worker thread. When number of workers > min workers, " +
- "excess threads are killed after this time interval."),
+ HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time", "1800s",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "Maximum idle time for a connection on the server when in HTTP mode."),
+ HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME("hive.server2.thrift.http.worker.keepalive.time", "60s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Keepalive time for an idle http worker thread. When the number of workers exceeds min workers, " +
+ "excessive threads are killed after this time interval."),
// binary transport settings
HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000,
@@ -1513,23 +1532,26 @@ public class HiveConf extends Configurat
"Minimum number of Thrift worker threads"),
HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 500,
"Maximum number of Thrift worker threads"),
- HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME("hive.server2.thrift.worker.keepalive.time", 60,
- "Keepalive time (in seconds) for an idle worker thread. When number of workers > min workers, " +
- "excess threads are killed after this time interval."),
+ HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME("hive.server2.thrift.worker.keepalive.time", "60s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Keepalive time (in seconds) for an idle worker thread. When the number of workers exceeds min workers, " +
+ "excessive threads are killed after this time interval."),
// Configuration for async thread pool in SessionManager
HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 100,
"Number of threads in the async thread pool for HiveServer2"),
- HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10,
- "Time (in seconds) for which HiveServer2 shutdown will wait for async"),
+ HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", "10s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Maximum time for which HiveServer2 shutdown will wait for async"),
HIVE_SERVER2_ASYNC_EXEC_WAIT_QUEUE_SIZE("hive.server2.async.exec.wait.queue.size", 100,
"Size of the wait queue for async thread pool in HiveServer2.\n" +
"After hitting this limit, the async thread pool will reject new requests."),
- HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME("hive.server2.async.exec.keepalive.time", 10,
- "Time (in seconds) that an idle HiveServer2 async thread (from the thread pool) will wait\n" +
- "for a new task to arrive before terminating"),
- HIVE_SERVER2_LONG_POLLING_TIMEOUT("hive.server2.long.polling.timeout", 5000L,
- "Time in milliseconds that HiveServer2 will wait,\n" +
- "before responding to asynchronous calls that use long polling"),
+ HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME("hive.server2.async.exec.keepalive.time", "10s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Time that an idle HiveServer2 async thread (from the thread pool) will wait for a new task\n" +
+ "to arrive before terminating"),
+ HIVE_SERVER2_LONG_POLLING_TIMEOUT("hive.server2.long.polling.timeout", "5000ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "Time that HiveServer2 will wait before responding to asynchronous calls that use long polling"),
// HiveServer2 auth configuration
HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE",
@@ -1572,8 +1594,8 @@ public class HiveConf extends Configurat
"must be a proper implementation of the interface\n" +
"org.apache.hive.service.auth.PasswdAuthenticationProvider. HiveServer2\n" +
"will call its Authenticate(user, passed) method to authenticate requests.\n" +
- "The implementation may optionally extend Hadoop's\n" +
- "org.apache.hadoop.conf.Configured class to grab Hive's Configuration object."),
+ "The implementation may optionally implement Hadoop's\n" +
+ "org.apache.hadoop.conf.Configurable class to grab Hive's Configuration object."),
HIVE_SERVER2_PAM_SERVICES("hive.server2.authentication.pam.services", null,
"List of the underlying pam services that should be used when auth type is PAM\n" +
"A file with the same name must exist in /etc/pam.d"),
@@ -1594,6 +1616,18 @@ public class HiveConf extends Configurat
HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,compile",
"Comma separated list of non-SQL Hive commands users are authorized to execute"),
+ HIVE_SERVER2_SESSION_CHECK_INTERVAL("hive.server2.session.check.interval", "0ms",
+ new TimeValidator(TimeUnit.MILLISECONDS, 3000L, true, null, false),
+ "The check interval for session/operation timeout, which can be disabled by setting to zero or negative value."),
+ HIVE_SERVER2_IDLE_SESSION_TIMEOUT("hive.server2.idle.session.timeout", "0ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "Session will be closed when it's not accessed for this duration, which can be disabled by setting to zero or negative value."),
+ HIVE_SERVER2_IDLE_OPERATION_TIMEOUT("hive.server2.idle.operation.timeout", "0ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "Operation will be closed when it's not accessed for this duration of time, which can be disabled by setting to zero value.\n" +
+ " With positive value, it's checked for operations in terminal state only (FINISHED, CANCELED, CLOSED, ERROR).\n" +
+ " With negative value, it's checked for all of the operations regardless of state."),
+
HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
"hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role",
"Comma separated list of configuration options which are immutable at runtime"),
@@ -1651,8 +1685,9 @@ public class HiveConf extends Configurat
"Enable list bucketing optimizer. Default value is false so that we disable it by default."),
// Allow TCP Keep alive socket option for HiveServer or a maximum timeout for the socket.
- SERVER_READ_SOCKET_TIMEOUT("hive.server.read.socket.timeout", 10,
- "Timeout for the HiveServer to close the connection if no response from the client in N seconds, defaults to 10 seconds."),
+ SERVER_READ_SOCKET_TIMEOUT("hive.server.read.socket.timeout", "10s",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Timeout for the HiveServer to close the connection if no response from the client. By default, 10 seconds."),
SERVER_TCP_KEEP_ALIVE("hive.server.tcp.keepalive", true,
"Whether to enable TCP keepalive for the Hive Server. Keepalive will prevent accumulation of half-open connections."),
@@ -1711,8 +1746,9 @@ public class HiveConf extends Configurat
"turning on Tez for HiveServer2. The user could potentially want to run queries\n" +
"over Tez without the pool of sessions."),
- HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "column", new PatternSet("none", "column"),
- "Whether to use quoted identifier. 'none' ot 'column' can be used. \n" +
+ HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "column",
+ new StringSet("none", "column"),
+ "Whether to use quoted identifier. 'none' or 'column' can be used. \n" +
" none: default(past) behavior. Implies only alphaNumeric and underscore are valid characters in identifiers.\n" +
" column: implies column names can contain any character."
),
@@ -1732,8 +1768,9 @@ public class HiveConf extends Configurat
HIVE_CHECK_CROSS_PRODUCT("hive.exec.check.crossproducts", true,
"Check if a plan contains a Cross Product. If there is one, output a warning to the Session's console."),
- HIVE_LOCALIZE_RESOURCE_WAIT_INTERVAL("hive.localize.resource.wait.interval", 5000L,
- "Time in milliseconds to wait for another thread to localize the same resource for hive-tez."),
+ HIVE_LOCALIZE_RESOURCE_WAIT_INTERVAL("hive.localize.resource.wait.interval", "5000ms",
+ new TimeValidator(TimeUnit.MILLISECONDS),
+ "Time to wait for another thread to localize the same resource for hive-tez."),
HIVE_LOCALIZE_RESOURCE_NUM_WAIT_ATTEMPTS("hive.localize.resource.num.wait.attempts", 5,
"The number of attempts waiting for localizing a resource in hive-tez."),
TEZ_AUTO_REDUCER_PARALLELISM("hive.tez.auto.reducer.parallelism", false,
@@ -1843,11 +1880,29 @@ public class HiveConf extends Configurat
return validator == null ? null : validator.validate(value);
}
+ public String validatorDescription() {
+ return validator == null ? null : validator.toDescription();
+ }
+
public String typeString() {
- return valType.typeString();
+ String type = valType.typeString();
+ if (valType == VarType.STRING && validator != null) {
+ if (validator instanceof TimeValidator) {
+ type += "(TIME)";
+ }
+ }
+ return type;
+ }
+
+ public String getRawDescription() {
+ return description;
}
public String getDescription() {
+ String validator = validatorDescription();
+ if (validator != null) {
+ return validator + ".\n" + description;
+ }
return description;
}
@@ -1983,6 +2038,82 @@ public class HiveConf extends Configurat
setIntVar(this, var, val);
}
+ public static long getTimeVar(Configuration conf, ConfVars var, TimeUnit outUnit) {
+ return toTime(getVar(conf, var), getDefaultTimeUnit(var), outUnit);
+ }
+
+ public static void setTimeVar(Configuration conf, ConfVars var, long time, TimeUnit timeunit) {
+ assert (var.valClass == String.class) : var.varname;
+ conf.set(var.varname, time + stringFor(timeunit));
+ }
+
+ public long getTimeVar(ConfVars var, TimeUnit outUnit) {
+ return getTimeVar(this, var, outUnit);
+ }
+
+ public void setTimeVar(ConfVars var, long time, TimeUnit outUnit) {
+ setTimeVar(this, var, time, outUnit);
+ }
+
+ private static TimeUnit getDefaultTimeUnit(ConfVars var) {
+ TimeUnit inputUnit = null;
+ if (var.validator instanceof TimeValidator) {
+ inputUnit = ((TimeValidator)var.validator).getTimeUnit();
+ }
+ return inputUnit;
+ }
+
+ public static long toTime(String value, TimeUnit inputUnit, TimeUnit outUnit) {
+ String[] parsed = parseTime(value.trim()); // parsed[0] = numeric magnitude, parsed[1] = unit suffix
+ return outUnit.convert(Long.parseLong(parsed[0].trim()), unitFor(parsed[1].trim(), inputUnit));
+ }
+
+ private static String[] parseTime(String value) {
+ int end = 0; // advance past the leading sign/digit run that forms the magnitude
+ while (end < value.length() && (value.charAt(end) == '-' || Character.isDigit(value.charAt(end)))) {
+ end++;
+ }
+ return new String[] {value.substring(0, end), value.substring(end)};
+ }
+
+ public static TimeUnit unitFor(String unit, TimeUnit defaultUnit) {
+ unit = unit.trim().toLowerCase();
+ if (unit.isEmpty()) {
+ if (defaultUnit == null) {
+ throw new IllegalArgumentException("Time unit is not specified");
+ }
+ return defaultUnit;
+ } else if (unit.equals("d") || unit.startsWith("day")) {
+ return TimeUnit.DAYS;
+ } else if (unit.equals("h") || unit.startsWith("hour")) {
+ return TimeUnit.HOURS;
+ } else if (unit.equals("m") || unit.startsWith("min")) {
+ return TimeUnit.MINUTES;
+ } else if (unit.equals("s") || unit.startsWith("sec")) {
+ return TimeUnit.SECONDS;
+ } else if (unit.equals("ms") || unit.startsWith("msec")) {
+ return TimeUnit.MILLISECONDS;
+ } else if (unit.equals("us") || unit.startsWith("usec")) {
+ return TimeUnit.MICROSECONDS;
+ } else if (unit.equals("ns") || unit.startsWith("nsec")) {
+ return TimeUnit.NANOSECONDS;
+ }
+ throw new IllegalArgumentException("Invalid time unit " + unit);
+ }
+
+ public static String stringFor(TimeUnit timeunit) {
+ switch (timeunit) {
+ case DAYS: return "day";
+ case HOURS: return "hour";
+ case MINUTES: return "min";
+ case SECONDS: return "sec";
+ case MILLISECONDS: return "msec";
+ case MICROSECONDS: return "usec";
+ case NANOSECONDS: return "nsec";
+ }
+ throw new IllegalArgumentException("Invalid timeunit " + timeunit);
+ }
+
public static long getLongVar(Configuration conf, ConfVars var) {
assert (var.valClass == Long.class) : var.varname;
return conf.getLong(var.varname, var.defaultLongVal);
Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/Validator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/Validator.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/Validator.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/Validator.java Thu Sep 4 02:49:46 2014
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/**
@@ -31,57 +32,85 @@ public interface Validator {
String validate(String value);
- static class StringSet implements Validator {
+ String toDescription();
+ class StringSet implements Validator {
+
+ private final boolean caseSensitive;
private final Set<String> expected = new LinkedHashSet<String>();
public StringSet(String... values) {
+ this(false, values);
+ }
+
+ public StringSet(boolean caseSensitive, String... values) {
+ this.caseSensitive = caseSensitive;
for (String value : values) {
- expected.add(value.toLowerCase());
+ expected.add(caseSensitive ? value : value.toLowerCase());
}
}
@Override
public String validate(String value) {
- if (value == null || !expected.contains(value.toLowerCase())) {
+ if (value == null || !expected.contains(caseSensitive ? value : value.toLowerCase())) {
return "Invalid value.. expects one of " + expected;
}
return null;
}
+
+ @Override
+ public String toDescription() {
+ return "Expects one of " + expected;
+ }
}
- static enum RANGE_TYPE {
+ enum TYPE {
INT {
@Override
protected boolean inRange(String value, Object lower, Object upper) {
int ivalue = Integer.parseInt(value);
- return (Integer)lower <= ivalue && ivalue <= (Integer)upper;
+ if (lower != null && ivalue < (Integer)lower) {
+ return false;
+ }
+ if (upper != null && ivalue > (Integer)upper) {
+ return false;
+ }
+ return true;
}
},
LONG {
@Override
protected boolean inRange(String value, Object lower, Object upper) {
long lvalue = Long.parseLong(value);
- return (Long)lower <= lvalue && lvalue <= (Long)upper;
+ if (lower != null && lvalue < (Long)lower) {
+ return false;
+ }
+ if (upper != null && lvalue > (Long)upper) {
+ return false;
+ }
+ return true;
}
},
FLOAT {
@Override
protected boolean inRange(String value, Object lower, Object upper) {
float fvalue = Float.parseFloat(value);
- return (Float)lower <= fvalue && fvalue <= (Float)upper;
+ if (lower != null && fvalue < (Float)lower) {
+ return false;
+ }
+ if (upper != null && fvalue > (Float)upper) {
+ return false;
+ }
+ return true;
}
};
- public static RANGE_TYPE valueOf(Object lower, Object upper) {
- if (lower instanceof Integer && upper instanceof Integer) {
- assert (Integer)lower < (Integer)upper;
+ public static TYPE valueOf(Object lower, Object upper) {
+ if (lower instanceof Integer || upper instanceof Integer) {
return INT;
- } else if (lower instanceof Long && upper instanceof Long) {
- assert (Long)lower < (Long)upper;
+ } else if (lower instanceof Long || upper instanceof Long) {
return LONG;
- } else if (lower instanceof Float && upper instanceof Float) {
- assert (Float)lower < (Float)upper;
+ } else if (lower instanceof Float || upper instanceof Float) {
return FLOAT;
}
throw new IllegalArgumentException("invalid range from " + lower + " to " + upper);
@@ -90,15 +119,15 @@ public interface Validator {
protected abstract boolean inRange(String value, Object lower, Object upper);
}
- static class RangeValidator implements Validator {
+ class RangeValidator implements Validator {
- private final RANGE_TYPE type;
+ private final TYPE type;
private final Object lower, upper;
public RangeValidator(Object lower, Object upper) {
this.lower = lower;
this.upper = upper;
- this.type = RANGE_TYPE.valueOf(lower, upper);
+ this.type = TYPE.valueOf(lower, upper);
}
@Override
@@ -115,9 +144,23 @@ public interface Validator {
}
return null;
}
+
+ @Override
+ public String toDescription() {
+ if (lower == null && upper == null) {
+ return null;
+ }
+ if (lower != null && upper != null) {
+ return "Expects value between " + lower + " and " + upper;
+ }
+ if (lower != null) {
+ return "Expects value bigger than " + lower;
+ }
+ return "Expects value smaller than " + upper;
+ }
}
- static class PatternSet implements Validator {
+ class PatternSet implements Validator {
private final List<Pattern> expected = new ArrayList<Pattern>();
@@ -139,15 +182,20 @@ public interface Validator {
}
return "Invalid value.. expects one of patterns " + expected;
}
+
+ @Override
+ public String toDescription() {
+ return "Expects one of the pattern in " + expected;
+ }
}
- static class RatioValidator implements Validator {
+ class RatioValidator implements Validator {
@Override
public String validate(String value) {
try {
float fvalue = Float.valueOf(value);
- if (fvalue <= 0 || fvalue >= 1) {
+ if (fvalue < 0 || fvalue > 1) {
return "Invalid ratio " + value + ", which should be in between 0 to 1";
}
} catch (NumberFormatException e) {
@@ -155,5 +203,77 @@ public interface Validator {
}
return null;
}
+
+ @Override
+ public String toDescription() {
+ return "Expects value between 0.0f and 1.0f";
+ }
+ }
+
+ class TimeValidator implements Validator {
+
+ private final TimeUnit timeUnit;
+
+ private final Long min;
+ private final boolean minInclusive;
+
+ private final Long max;
+ private final boolean maxInclusive;
+
+ public TimeValidator(TimeUnit timeUnit) {
+ this(timeUnit, null, false, null, false);
+ }
+
+ public TimeValidator(TimeUnit timeUnit,
+ Long min, boolean minInclusive, Long max, boolean maxInclusive) {
+ this.timeUnit = timeUnit;
+ this.min = min;
+ this.minInclusive = minInclusive;
+ this.max = max;
+ this.maxInclusive = maxInclusive;
+ }
+
+ public TimeUnit getTimeUnit() {
+ return timeUnit;
+ }
+
+ @Override
+ public String validate(String value) {
+ try {
+ long time = HiveConf.toTime(value, timeUnit, timeUnit);
+ if (min != null && (minInclusive ? time < min : time <= min)) {
+ return value + " is smaller than " + timeString(min);
+ }
+ if (max != null && (maxInclusive ? time > max : time >= max)) {
+ return value + " is bigger than " + timeString(max);
+ }
+ } catch (Exception e) {
+ return e.toString();
+ }
+ return null;
+ }
+
+ @Override public String toDescription() {
+ String description =
+ "Expects a time value with unit " +
+ "(d/day, h/hour, m/min, s/sec, ms/msec, us/usec, ns/nsec)" +
+ ", which is " + HiveConf.stringFor(timeUnit) + " if not specified";
+ if (min != null && max != null) {
+ description += ".\nThe time should be in between " +
+ timeString(min) + (minInclusive ? " (inclusive)" : " (exclusive)") + " and " +
+ timeString(max) + (maxInclusive ? " (inclusive)" : " (exclusive)");
+ } else if (min != null) {
+ description += ".\nThe time should be bigger than " +
+ (minInclusive ? "or equal to " : "") + timeString(min);
+ } else if (max != null) {
+ description += ".\nThe time should be smaller than " +
+ (maxInclusive ? "or equal to " : "") + timeString(max);
+ }
+ return description;
+ }
+
+ private String timeString(long time) {
+ return time + " " + HiveConf.stringFor(timeUnit);
+ }
}
}
Modified: hive/branches/cbo/hbase-handler/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/pom.xml?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/pom.xml (original)
+++ hive/branches/cbo/hbase-handler/pom.xml Thu Sep 4 02:49:46 2014
@@ -36,34 +36,9 @@
<!-- intra-project -->
<dependency>
<groupId>org.apache.hive</groupId>
- <artifactId>hive-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-metastore</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-serde</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-service</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${project.version}</version>
</dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-shims</artifactId>
- <version>${project.version}</version>
- </dependency>
<!-- inter-project -->
<dependency>
<groupId>commons-lang</groupId>
Propchange: hive/branches/cbo/hbase-handler/pom.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Sep 4 02:49:46 2014
@@ -0,0 +1,5 @@
+/hive/branches/branch-0.11/hbase-handler/pom.xml:1480385,1480458,1481120,1481344,1481346,1481348,1481352,1483872,1505184
+/hive/branches/spark/hbase-handler/pom.xml:1608589-1621357
+/hive/branches/tez/hbase-handler/pom.xml:1494760-1573249
+/hive/branches/vectorization/hbase-handler/pom.xml:1466908-1527856
+/hive/trunk/hbase-handler/pom.xml:1605012-1622394
Modified: hive/branches/cbo/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java (original)
+++ hive/branches/cbo/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java Thu Sep 4 02:49:46 2014
@@ -224,7 +224,7 @@ public abstract class DataType {
if (o1[i] == o2[i]) {
continue;
}
- if (o1[i] > o1[i]) {
+ if (o1[i] > o2[i]) {
return 1;
} else {
return -1;
Modified: hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java (original)
+++ hive/branches/cbo/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java Thu Sep 4 02:49:46 2014
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
@@ -91,13 +92,11 @@ public class TestPermsGrp extends TestCa
hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
- hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
- hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
- hcatConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "60");
+ hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, TimeUnit.SECONDS);
hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
clientWH = new Warehouse(hcatConf);
msc = new HiveMetaStoreClient(hcatConf, null);