You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by la...@apache.org on 2012/12/23 21:54:15 UTC
svn commit: r1425525 [1/7] - in /hbase/branches/0.94-test: ./ bin/ conf/
security/src/main/java/org/apache/hadoop/hbase/ipc/
security/src/main/java/org/apache/hadoop/hbase/security/access/
security/src/test/java/org/apache/hadoop/hbase/security/access/...
Author: larsh
Date: Sun Dec 23 20:54:12 2012
New Revision: 1425525
URL: http://svn.apache.org/viewvc?rev=1425525&view=rev
Log:
back to latest
Added:
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationHLogReaderManager.java
- copied unchanged from r1425406, hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationHLogReaderManager.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClassTestFinder.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/ClassTestFinder.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClusterManager.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/ClusterManager.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IngestIntegrationTestBase.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/IngestIntegrationTestBase.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTests.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/IntegrationTests.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/Mocking.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/master/Mocking.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/handler/
- copied from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/master/handler/
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionBusyWait.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionBusyWait.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplitCompressed.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplitCompressed.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithCompression.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithCompression.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/StoppableImplementation.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/util/StoppableImplementation.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
- copied unchanged from r1425406, hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
Modified:
hbase/branches/0.94-test/ (props changed)
hbase/branches/0.94-test/bin/hbase
hbase/branches/0.94-test/conf/hbase-env.sh
hbase/branches/0.94-test/conf/log4j.properties
hbase/branches/0.94-test/pom.xml
hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java
hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java
hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Increment.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Put.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Result.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/RowLock.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Scan.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorClassLoader.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/EnableTableHandler.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/security/User.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/Threads.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java
hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
hbase/branches/0.94-test/src/main/resources/hbase-default.xml
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/LargeTests.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/MediumTests.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/SmallTests.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestDrainingServer.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ipc/TestPBOnWritableRpc.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMXBean.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestExactCounterMetric.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestExponentiallyDecayingSample.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsHistogram.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestMXBean.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenMasterInitializing.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/replication/TestReplication.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestSizeBasedThrottler.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTable.java
hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTableReadOnly.java
Propchange: hbase/branches/0.94-test/
------------------------------------------------------------------------------
svn:mergeinfo = /hbase/branches/0.94:1425353-1425406
Modified: hbase/branches/0.94-test/bin/hbase
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/bin/hbase?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/bin/hbase (original)
+++ hbase/branches/0.94-test/bin/hbase Sun Dec 23 20:54:12 2012
@@ -248,6 +248,21 @@ fi
# restore ordinary behaviour
unset IFS
+#Set the right GC options based on the what we are running
+declare -a client_cmds=("shell" "hbck" "hlog" "hfile" "zkcli")
+for cmd in $client_cmds; do
+ if [[ $cmd == $COMMAND ]]; then
+ client=true
+ break
+ fi
+done
+
+if [[ $client ]]; then
+ HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
+else
+ HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
+fi
+
# figure out which class to run
if [ "$COMMAND" = "shell" ] ; then
# eg export JRUBY_HOME=/usr/local/share/jruby
Modified: hbase/branches/0.94-test/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/conf/hbase-env.sh?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/conf/hbase-env.sh (original)
+++ hbase/branches/0.94-test/conf/hbase-env.sh Sun Dec 23 20:54:12 2012
@@ -21,6 +21,10 @@
# Set environment variables here.
+# This script sets variables multiple times over the course of starting an hbase process,
+# so try to keep things idempotent unless you want to take an even deeper look
+# into the startup scripts (bin/hbase, etc.)
+
# The java implementation to use. Java 1.6 required.
# export JAVA_HOME=/usr/java/jdk1.6.0/
@@ -34,12 +38,20 @@
# Below are what we set by default. May only work with SUN JVM.
# For more on why as well as other possible settings,
# see http://wiki.apache.org/hadoop/PerformanceTuning
-export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC"
+export HBASE_OPTS="-XX:+UseConcMarkSweepGC"
+
+# Uncomment below to enable java garbage collection logging for the server-side processes
+# this enables basic gc logging for the server processes to the .out file
+# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"
+
+# this enables gc logging using automatic GC log rolling. Only applies to jdk 1.6.0_34+ and 1.7.0_2+. Either use this set of options or the one above
+# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=1 -XX:GCLogFileSize=512M $HBASE_GC_OPTS"
-# Uncomment below to enable java garbage collection logging in the .out file.
-# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"
+# Uncomment below to enable java garbage collection logging for the client processes in the .out file.
+# export CLIENT_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"
-# Uncomment below (along with above GC logging) to put GC information in its own logfile (will set HBASE_GC_OPTS)
+# Uncomment below (along with above GC logging) to put GC information in its own logfile (will set HBASE_GC_OPTS).
+# This applies to both the server and client GC options above
# export HBASE_USE_GC_LOGFILE=true
Modified: hbase/branches/0.94-test/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/conf/log4j.properties?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/conf/log4j.properties (original)
+++ hbase/branches/0.94-test/conf/log4j.properties Sun Dec 23 20:54:12 2012
@@ -39,6 +39,7 @@ log4j.appender.DRFAS.layout=org.apache.l
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
+#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#
# Null Appender
Modified: hbase/branches/0.94-test/pom.xml
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/pom.xml?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/pom.xml (original)
+++ hbase/branches/0.94-test/pom.xml Sun Dec 23 20:54:12 2012
@@ -36,7 +36,7 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase</artifactId>
<packaging>jar</packaging>
- <version>0.94.3</version>
+ <version>0.94.4-SNAPSHOT</version>
<name>HBase</name>
<description>
HBase is the &lt;a href="http://hadoop.apache.org"&rt;Hadoop</a&rt; database. Use it when you need
@@ -401,9 +401,8 @@
<include>${integrationtest.include}</include>
</includes>
<excludes>
- <exlude>${unittest.include}</exlude>
+ <exclude>${unittest.include}</exclude>
<exclude>**/*$*</exclude>
- <exclude>${test.exclude.pattern}</exclude>
</excludes>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<environmentVariables>
@@ -764,6 +763,12 @@
<configuration>
<skip>false</skip>
<forkMode>always</forkMode>
+ <!-- TODO: failsafe does timeout, but verify does not fail the build because of the timeout.
+ I believe it is a failsafe bug, we may consider using surefire -->
+ <forkedProcessTimeoutInSeconds>1800</forkedProcessTimeoutInSeconds>
+ <argLine>-enableassertions -Xmx1900m
+ -Djava.security.egd=file:/dev/./urandom -Djava.net.preferIPv4Stack=true</argLine>
+ <testFailureIgnore>false</testFailureIgnore>
</configuration>
</plugin>
<plugin>
@@ -1012,8 +1017,9 @@
<protobuf.version>2.4.0a</protobuf.version>
<stax-api.version>1.0.1</stax-api.version>
<thrift.version>0.8.0</thrift.version>
- <zookeeper.version>3.4.3</zookeeper.version>
+ <zookeeper.version>3.4.5</zookeeper.version>
<hadoop-snappy.version>0.0.1-SNAPSHOT</hadoop-snappy.version>
+ <clover.version>2.6.3</clover.version>
<package.prefix>/usr</package.prefix>
<package.conf.dir>/etc/hbase</package.conf.dir>
@@ -1709,7 +1715,7 @@
</property>
</activation>
<properties>
- <hadoop.version>1.1.0</hadoop.version>
+ <hadoop.version>1.1.1</hadoop.version>
<slf4j.version>1.4.3</slf4j.version>
</properties>
<dependencies>
@@ -2352,6 +2358,61 @@
<surefire.firstPartGroups></surefire.firstPartGroups>
</properties>
</profile>
+
+ <!-- Profile for running clover. You need to have a clover license under ~/.clover.license for ${clover.version}
+or you can provide the license with -Dmaven.clover.licenseLocation=/path/to/license. Committers can find
+the license under https://svn.apache.org/repos/private/committers/donated-licenses/clover/
+Note that clover 2.6.3 does not run with maven 3, so you have to use maven2. The report will be generated
+under target/site/clover/index.html when you run
+MAVEN_OPTS=-Xmx2048m mvn clean test -Pclover site -->
+ <profile>
+ <id>clover</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ <property>
+ <name>clover</name>
+ </property>
+ </activation>
+ <properties>
+ <maven.clover.licenseLocation>${user.home}/.clover.license</maven.clover.licenseLocation>
+ <clover.version>2.6.3</clover.version>
+ </properties>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>com.atlassian.maven.plugins</groupId>
+ <artifactId>maven-clover2-plugin</artifactId>
+ <version>${clover.version}</version>
+ <configuration>
+ <includesAllSourceRoots>true</includesAllSourceRoots>
+ <includesTestSourceRoots>true</includesTestSourceRoots>
+ <targetPercentage>50%</targetPercentage>
+ <generateHtml>true</generateHtml>
+ <generateXml>true</generateXml>
+ <excludes>
+ <exclude>**/generated/**</exclude>
+ </excludes>
+ </configuration>
+ <executions>
+ <execution>
+ <id>clover-setup</id>
+ <phase>process-sources</phase>
+ <goals>
+ <goal>setup</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>clover</id>
+ <phase>site</phase>
+ <goals>
+ <goal>clover</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
<!-- See http://jira.codehaus.org/browse/MSITE-443 why the settings need to be here and not in pluginManagement. -->
Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java Sun Dec 23 20:54:12 2012
@@ -98,7 +98,7 @@ public class SecureClient extends HBaseC
User ticket = remoteId.getTicket();
Class<?> protocol = remoteId.getProtocol();
- this.useSasl = User.isSecurityEnabled();
+ this.useSasl = User.isHBaseSecurityEnabled(conf);
if (useSasl && protocol != null) {
TokenInfo tokenInfo = protocol.getAnnotation(TokenInfo.class);
if (tokenInfo != null) {
Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java Sun Dec 23 20:54:12 2012
@@ -684,7 +684,7 @@ public abstract class SecureServer exten
conf, serverName, highPriorityLevel);
this.authorize =
conf.getBoolean(HADOOP_SECURITY_AUTHORIZATION, false);
- this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
+ this.isSecurityEnabled = User.isHBaseSecurityEnabled(this.conf);
if (isSecurityEnabled) {
HBaseSaslRpcServer.init(conf);
Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java Sun Dec 23 20:54:12 2012
@@ -46,6 +46,8 @@ import org.apache.hadoop.hbase.coprocess
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
@@ -62,6 +64,7 @@ import org.apache.hadoop.hbase.security.
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
@@ -101,7 +104,7 @@ import com.google.common.collect.Sets;
* </p>
*/
public class AccessController extends BaseRegionObserver
- implements MasterObserver, AccessControllerProtocol {
+ implements MasterObserver, RegionServerObserver, AccessControllerProtocol {
/**
* Represents the result of an authorization check for logging and error
* reporting.
@@ -112,12 +115,14 @@ public class AccessController extends Ba
private final byte[] family;
private final byte[] qualifier;
private final Permission.Action action;
+ private final String request;
private final String reason;
private final User user;
- public AuthResult(boolean allowed, String reason, User user,
+ public AuthResult(boolean allowed, String request, String reason, User user,
Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
this.allowed = allowed;
+ this.request = request;
this.reason = reason;
this.user = user;
this.table = table;
@@ -132,6 +137,8 @@ public class AccessController extends Ba
public String getReason() { return reason; }
+ public String getRequest() { return request; }
+
public String toContextString() {
return "(user=" + (user != null ? user.getName() : "UNKNOWN") + ", " +
"scope=" + (table == null ? "GLOBAL" : Bytes.toString(table)) + ", " +
@@ -145,23 +152,23 @@ public class AccessController extends Ba
.append(toContextString()).toString();
}
- public static AuthResult allow(String reason, User user, Permission.Action action,
+ public static AuthResult allow(String request, String reason, User user, Permission.Action action,
byte[] table, byte[] family, byte[] qualifier) {
- return new AuthResult(true, reason, user, action, table, family, qualifier);
+ return new AuthResult(true, request, reason, user, action, table, family, qualifier);
}
- public static AuthResult allow(String reason, User user, Permission.Action action, byte[] table) {
- return new AuthResult(true, reason, user, action, table, null, null);
+ public static AuthResult allow(String request, String reason, User user, Permission.Action action, byte[] table) {
+ return new AuthResult(true, request, reason, user, action, table, null, null);
}
- public static AuthResult deny(String reason, User user,
+ public static AuthResult deny(String request, String reason, User user,
Permission.Action action, byte[] table) {
- return new AuthResult(false, reason, user, action, table, null, null);
+ return new AuthResult(false, request, reason, user, action, table, null, null);
}
- public static AuthResult deny(String reason, User user,
+ public static AuthResult deny(String request, String reason, User user,
Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
- return new AuthResult(false, reason, user, action, table, family, qualifier);
+ return new AuthResult(false, request, reason, user, action, table, family, qualifier);
}
}
@@ -252,7 +259,7 @@ public class AccessController extends Ba
* the request
* @return
*/
- AuthResult permissionGranted(User user, TablePermission.Action permRequest,
+ AuthResult permissionGranted(String request, User user, TablePermission.Action permRequest,
RegionCoprocessorEnvironment e,
Map<byte [], ? extends Collection<?>> families) {
HRegionInfo hri = e.getRegion().getRegionInfo();
@@ -262,12 +269,12 @@ public class AccessController extends Ba
// this is a very common operation, so deal with it quickly.
if (hri.isRootRegion() || hri.isMetaRegion()) {
if (permRequest == TablePermission.Action.READ) {
- return AuthResult.allow("All users allowed", user, permRequest, tableName);
+ return AuthResult.allow(request, "All users allowed", user, permRequest, tableName);
}
}
if (user == null) {
- return AuthResult.deny("No user associated with request!", null, permRequest, tableName);
+ return AuthResult.deny(request, "No user associated with request!", null, permRequest, tableName);
}
// Users with CREATE/ADMIN rights need to modify .META. and _acl_ table
@@ -281,12 +288,12 @@ public class AccessController extends Ba
(authManager.authorize(user, Permission.Action.CREATE) ||
authManager.authorize(user, Permission.Action.ADMIN)))
{
- return AuthResult.allow("Table permission granted", user, permRequest, tableName);
+ return AuthResult.allow(request, "Table permission granted", user, permRequest, tableName);
}
// 2. check for the table-level, if successful we can short-circuit
if (authManager.authorize(user, tableName, (byte[])null, permRequest)) {
- return AuthResult.allow("Table permission granted", user, permRequest, tableName);
+ return AuthResult.allow(request, "Table permission granted", user, permRequest, tableName);
}
// 3. check permissions against the requested families
@@ -307,7 +314,7 @@ public class AccessController extends Ba
for (byte[] qualifier : familySet) {
if (!authManager.authorize(user, tableName, family.getKey(),
qualifier, permRequest)) {
- return AuthResult.deny("Failed qualifier check", user,
+ return AuthResult.deny(request, "Failed qualifier check", user,
permRequest, tableName, family.getKey(), qualifier);
}
}
@@ -316,25 +323,25 @@ public class AccessController extends Ba
for (KeyValue kv : kvList) {
if (!authManager.authorize(user, tableName, family.getKey(),
kv.getQualifier(), permRequest)) {
- return AuthResult.deny("Failed qualifier check", user,
+ return AuthResult.deny(request, "Failed qualifier check", user,
permRequest, tableName, family.getKey(), kv.getQualifier());
}
}
}
} else {
// no qualifiers and family-level check already failed
- return AuthResult.deny("Failed family check", user, permRequest,
+ return AuthResult.deny(request, "Failed family check", user, permRequest,
tableName, family.getKey(), null);
}
}
// all family checks passed
- return AuthResult.allow("All family checks passed", user, permRequest,
+ return AuthResult.allow(request, "All family checks passed", user, permRequest,
tableName);
}
// 4. no families to check and table level access failed
- return AuthResult.deny("No families to check and table permission failed",
+ return AuthResult.deny(request, "No families to check and table permission failed",
user, permRequest, tableName);
}
@@ -349,6 +356,7 @@ public class AccessController extends Ba
" for user " + (result.getUser() != null ? result.getUser().getShortName() : "UNKNOWN") +
"; reason: " + result.getReason() +
"; remote address: " + (remoteAddr != null ? remoteAddr : "") +
+ "; request: " + result.getRequest() +
"; context: " + result.toContextString());
}
}
@@ -377,18 +385,20 @@ public class AccessController extends Ba
* @throws IOException if obtaining the current user fails
* @throws AccessDeniedException if user has no authorization
*/
- private void requirePermission(byte[] tableName, byte[] family, byte[] qualifier,
+ private void requirePermission(String request, byte[] tableName, byte[] family, byte[] qualifier,
Action... permissions) throws IOException {
User user = getActiveUser();
AuthResult result = null;
for (Action permission : permissions) {
if (authManager.authorize(user, tableName, family, qualifier, permission)) {
- result = AuthResult.allow("Table permission granted", user, permission, tableName, family, qualifier);
+ result = AuthResult.allow(request, "Table permission granted", user,
+ permission, tableName, family, qualifier);
break;
} else {
// rest of the world
- result = AuthResult.deny("Insufficient permissions", user, permission, tableName, family, qualifier);
+ result = AuthResult.deny(request, "Insufficient permissions", user,
+ permission, tableName, family, qualifier);
}
}
logResult(result);
@@ -403,12 +413,12 @@ public class AccessController extends Ba
* @throws IOException if obtaining the current user fails
* @throws AccessDeniedException if authorization is denied
*/
- private void requirePermission(Permission.Action perm) throws IOException {
+ private void requirePermission(String request, Permission.Action perm) throws IOException {
User user = getActiveUser();
if (authManager.authorize(user, perm)) {
- logResult(AuthResult.allow("Global check allowed", user, perm, null));
+ logResult(AuthResult.allow(request, "Global check allowed", user, perm, null));
} else {
- logResult(AuthResult.deny("Global check failed", user, perm, null));
+ logResult(AuthResult.deny(request, "Global check failed", user, perm, null));
throw new AccessDeniedException("Insufficient permissions for user '" +
(user != null ? user.getShortName() : "null") +"' (global, action=" +
perm.toString() + ")");
@@ -423,7 +433,7 @@ public class AccessController extends Ba
* @param families The set of column families present/required in the request
* @throws AccessDeniedException if the authorization check failed
*/
- private void requirePermission(Permission.Action perm,
+ private void requirePermission(String request, Permission.Action perm,
RegionCoprocessorEnvironment env, Collection<byte[]> families)
throws IOException {
// create a map of family-qualifier
@@ -431,7 +441,7 @@ public class AccessController extends Ba
for (byte[] family : families) {
familyMap.put(family, null);
}
- requirePermission(perm, env, familyMap);
+ requirePermission(request, perm, env, familyMap);
}
/**
@@ -442,12 +452,12 @@ public class AccessController extends Ba
* @param families The map of column families-qualifiers.
* @throws AccessDeniedException if the authorization check failed
*/
- private void requirePermission(Permission.Action perm,
+ private void requirePermission(String request, Permission.Action perm,
RegionCoprocessorEnvironment env,
Map<byte[], ? extends Collection<?>> families)
throws IOException {
User user = getActiveUser();
- AuthResult result = permissionGranted(user, perm, env, families);
+ AuthResult result = permissionGranted(request, user, perm, env, families);
logResult(result);
if (!result.isAllowed()) {
@@ -511,17 +521,31 @@ public class AccessController extends Ba
/* ---- MasterObserver implementation ---- */
public void start(CoprocessorEnvironment env) throws IOException {
- // if running on HMaster
+
+ ZooKeeperWatcher zk = null;
if (env instanceof MasterCoprocessorEnvironment) {
- MasterCoprocessorEnvironment e = (MasterCoprocessorEnvironment)env;
- this.authManager = TableAuthManager.get(
- e.getMasterServices().getZooKeeper(),
- e.getConfiguration());
+ // if running on HMaster
+ MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env;
+ zk = mEnv.getMasterServices().getZooKeeper();
+ } else if (env instanceof RegionServerCoprocessorEnvironment) {
+ RegionServerCoprocessorEnvironment rsEnv = (RegionServerCoprocessorEnvironment) env;
+ zk = rsEnv.getRegionServerServices().getZooKeeper();
+ } else if (env instanceof RegionCoprocessorEnvironment) {
+ // if running at region
+ regionEnv = (RegionCoprocessorEnvironment) env;
+ zk = regionEnv.getRegionServerServices().getZooKeeper();
}
- // if running at region
- if (env instanceof RegionCoprocessorEnvironment) {
- regionEnv = (RegionCoprocessorEnvironment)env;
+ // If zk is null or IOException while obtaining auth manager,
+ // throw RuntimeException so that the coprocessor is unloaded.
+ if (zk != null) {
+ try {
+ this.authManager = TableAuthManager.get(zk, env.getConfiguration());
+ } catch (IOException ioe) {
+ throw new RuntimeException("Error obtaining TableAuthManager", ioe);
+ }
+ } else {
+ throw new RuntimeException("Error obtaining TableAuthManager, zk found null.");
}
}
@@ -532,7 +556,7 @@ public class AccessController extends Ba
@Override
public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> c,
HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
- requirePermission(Permission.Action.CREATE);
+ requirePermission("createTable", Permission.Action.CREATE);
}
@Override
@@ -551,7 +575,7 @@ public class AccessController extends Ba
@Override
public void preDeleteTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("deleteTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -563,7 +587,7 @@ public class AccessController extends Ba
@Override
public void preModifyTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HTableDescriptor htd) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("modifyTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -580,7 +604,7 @@ public class AccessController extends Ba
@Override
public void preAddColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HColumnDescriptor column) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("addColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -590,7 +614,7 @@ public class AccessController extends Ba
@Override
public void preModifyColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HColumnDescriptor descriptor) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("modifyColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -600,7 +624,7 @@ public class AccessController extends Ba
@Override
public void preDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
byte[] col) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("deleteColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -613,7 +637,7 @@ public class AccessController extends Ba
@Override
public void preEnableTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("enableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -627,7 +651,7 @@ public class AccessController extends Ba
throw new AccessDeniedException("Not allowed to disable "
+ AccessControlLists.ACL_TABLE_NAME_STR + " table.");
}
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("disableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -637,7 +661,7 @@ public class AccessController extends Ba
@Override
public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
ServerName srcServer, ServerName destServer) throws IOException {
- requirePermission(region.getTableName(), null, null, Action.ADMIN);
+ requirePermission("move", region.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -648,7 +672,7 @@ public class AccessController extends Ba
@Override
public void preAssign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo)
throws IOException {
- requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
+ requirePermission("assign", regionInfo.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -658,7 +682,7 @@ public class AccessController extends Ba
@Override
public void preUnassign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo,
boolean force) throws IOException {
- requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
+ requirePermission("unassign", regionInfo.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -668,7 +692,7 @@ public class AccessController extends Ba
@Override
public void preBalance(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("balance", Permission.Action.ADMIN);
}
@Override
public void postBalance(ObserverContext<MasterCoprocessorEnvironment> c)
@@ -677,7 +701,7 @@ public class AccessController extends Ba
@Override
public boolean preBalanceSwitch(ObserverContext<MasterCoprocessorEnvironment> c,
boolean newValue) throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("balanceSwitch", Permission.Action.ADMIN);
return newValue;
}
@Override
@@ -687,13 +711,13 @@ public class AccessController extends Ba
@Override
public void preShutdown(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("shutdown", Permission.Action.ADMIN);
}
@Override
public void preStopMaster(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("stopMaster", Permission.Action.ADMIN);
}
@Override
@@ -707,27 +731,34 @@ public class AccessController extends Ba
/* ---- RegionObserver implementation ---- */
@Override
- public void postOpen(ObserverContext<RegionCoprocessorEnvironment> c) {
- RegionCoprocessorEnvironment e = c.getEnvironment();
- final HRegion region = e.getRegion();
+ public void preOpen(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
+ RegionCoprocessorEnvironment env = e.getEnvironment();
+ final HRegion region = env.getRegion();
if (region == null) {
- LOG.error("NULL region from RegionCoprocessorEnvironment in postOpen()");
+ LOG.error("NULL region from RegionCoprocessorEnvironment in preOpen()");
return;
+ } else {
+ HRegionInfo regionInfo = region.getRegionInfo();
+ if (isSpecialTable(regionInfo)) {
+ isSystemOrSuperUser(regionEnv.getConfiguration());
+ } else {
+ requirePermission("open", Action.ADMIN);
+ }
}
+ }
- try {
- this.authManager = TableAuthManager.get(
- e.getRegionServerServices().getZooKeeper(),
- regionEnv.getConfiguration());
- } catch (IOException ioe) {
- // pass along as a RuntimeException, so that the coprocessor is unloaded
- throw new RuntimeException("Error obtaining TableAuthManager", ioe);
+ @Override
+ public void postOpen(ObserverContext<RegionCoprocessorEnvironment> c) {
+ RegionCoprocessorEnvironment env = c.getEnvironment();
+ final HRegion region = env.getRegion();
+ if (region == null) {
+ LOG.error("NULL region from RegionCoprocessorEnvironment in postOpen()");
+ return;
}
-
if (AccessControlLists.isAclRegion(region)) {
aclRegion = true;
try {
- initialize(e);
+ initialize(env);
} catch (IOException ex) {
// if we can't obtain permissions, it's better to fail
// than perform checks incorrectly
@@ -738,32 +769,32 @@ public class AccessController extends Ba
@Override
public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("flush", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("split", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
final Store store, final InternalScanner scanner) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("compact", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
return scanner;
}
@Override
public void preCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> e,
final Store store, final List<StoreFile> candidates) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("compactSelection", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public void preGetClosestRowBefore(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final Result result)
throws IOException {
- requirePermission(TablePermission.Action.READ, c.getEnvironment(),
+ requirePermission("getClosestRowBefore", TablePermission.Action.READ, c.getEnvironment(),
(family != null ? Lists.newArrayList(family) : null));
}
@@ -776,7 +807,7 @@ public class AccessController extends Ba
*/
RegionCoprocessorEnvironment e = c.getEnvironment();
User requestUser = getActiveUser();
- AuthResult authResult = permissionGranted(requestUser,
+ AuthResult authResult = permissionGranted("get", requestUser,
TablePermission.Action.READ, e, get.getFamilyMap());
if (!authResult.isAllowed()) {
if (hasFamilyQualifierPermission(requestUser,
@@ -793,7 +824,7 @@ public class AccessController extends Ba
} else {
get.setFilter(filter);
}
- logResult(AuthResult.allow("Access allowed with filter", requestUser,
+ logResult(AuthResult.allow("get", "Access allowed with filter", requestUser,
TablePermission.Action.READ, authResult.table));
} else {
logResult(authResult);
@@ -809,7 +840,7 @@ public class AccessController extends Ba
@Override
public boolean preExists(final ObserverContext<RegionCoprocessorEnvironment> c,
final Get get, final boolean exists) throws IOException {
- requirePermission(TablePermission.Action.READ, c.getEnvironment(),
+ requirePermission("exists", TablePermission.Action.READ, c.getEnvironment(),
get.familySet());
return exists;
}
@@ -818,7 +849,7 @@ public class AccessController extends Ba
public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
final Put put, final WALEdit edit, final boolean writeToWAL)
throws IOException {
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
+ requirePermission("put", TablePermission.Action.WRITE, c.getEnvironment(),
put.getFamilyMap());
}
@@ -834,7 +865,7 @@ public class AccessController extends Ba
public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit, final boolean writeToWAL)
throws IOException {
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
+ requirePermission("delete", TablePermission.Action.WRITE, c.getEnvironment(),
delete.getFamilyMap());
}
@@ -854,8 +885,8 @@ public class AccessController extends Ba
final WritableByteArrayComparable comparator, final Put put,
final boolean result) throws IOException {
Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
- requirePermission(TablePermission.Action.READ, c.getEnvironment(), familyMap);
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
+ requirePermission("checkAndPut", TablePermission.Action.READ, c.getEnvironment(), familyMap);
+ requirePermission("checkAndPut", TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
return result;
}
@@ -866,8 +897,8 @@ public class AccessController extends Ba
final WritableByteArrayComparable comparator, final Delete delete,
final boolean result) throws IOException {
Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
- requirePermission(TablePermission.Action.READ, c.getEnvironment(), familyMap);
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
+ requirePermission("checkAndDelete", TablePermission.Action.READ, c.getEnvironment(), familyMap);
+ requirePermission("checkAndDelete", TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
return result;
}
@@ -876,7 +907,7 @@ public class AccessController extends Ba
final byte [] row, final byte [] family, final byte [] qualifier,
final long amount, final boolean writeToWAL)
throws IOException {
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
+ requirePermission("incrementColumnValue", TablePermission.Action.WRITE, c.getEnvironment(),
Arrays.asList(new byte[][]{family}));
return -1;
}
@@ -884,7 +915,7 @@ public class AccessController extends Ba
@Override
public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
throws IOException {
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
+ requirePermission("append", TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
return null;
}
@@ -892,7 +923,7 @@ public class AccessController extends Ba
public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> c,
final Increment increment)
throws IOException {
- requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
+ requirePermission("increment", TablePermission.Action.WRITE, c.getEnvironment(),
increment.getFamilyMap().keySet());
return null;
}
@@ -906,7 +937,7 @@ public class AccessController extends Ba
*/
RegionCoprocessorEnvironment e = c.getEnvironment();
User user = getActiveUser();
- AuthResult authResult = permissionGranted(user, TablePermission.Action.READ, e,
+ AuthResult authResult = permissionGranted("scannerOpen", user, TablePermission.Action.READ, e,
scan.getFamilyMap());
if (!authResult.isAllowed()) {
if (hasFamilyQualifierPermission(user, TablePermission.Action.READ, e,
@@ -923,7 +954,7 @@ public class AccessController extends Ba
} else {
scan.setFilter(filter);
}
- logResult(AuthResult.allow("Access allowed with filter", user,
+ logResult(AuthResult.allow("scannerOpen", "Access allowed with filter", user,
TablePermission.Action.READ, authResult.table));
} else {
// no table/family level perms and no qualifier level perms, reject
@@ -999,7 +1030,7 @@ public class AccessController extends Ba
LOG.debug("Received request to grant access permission " + perm.toString());
}
- requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
+ requirePermission("grant", perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
AccessControlLists.addUserPermission(regionEnv.getConfiguration(), perm);
if (AUDITLOG.isTraceEnabled()) {
@@ -1029,7 +1060,8 @@ public class AccessController extends Ba
LOG.debug("Received request to revoke access permission " + perm.toString());
}
- requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
+ requirePermission("revoke", perm.getTable(), perm.getFamily(),
+ perm.getQualifier(), Action.ADMIN);
AccessControlLists.removeUserPermission(regionEnv.getConfiguration(), perm);
if (AUDITLOG.isTraceEnabled()) {
@@ -1055,7 +1087,7 @@ public class AccessController extends Ba
public List<UserPermission> getUserPermissions(final byte[] tableName) throws IOException {
// only allowed to be called on _acl_ region
if (aclRegion) {
- requirePermission(tableName, null, null, Action.ADMIN);
+ requirePermission("userPermissions", tableName, null, null, Action.ADMIN);
List<UserPermission> perms = AccessControlLists.getUserPermissions(
regionEnv.getConfiguration(), tableName);
@@ -1089,12 +1121,12 @@ public class AccessController extends Ba
}
}
- requirePermission(action, regionEnv, familyMap);
+ requirePermission("checkPermissions", action, regionEnv, familyMap);
}
} else {
for (Permission.Action action : permission.getActions()) {
- requirePermission(action);
+ requirePermission("checkPermissions", action);
}
}
}
@@ -1127,4 +1159,56 @@ public class AccessController extends Ba
}
return tableName;
}
+
+
+ @Override
+ public void preClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested)
+ throws IOException {
+ requirePermission("close", Permission.Action.ADMIN);
+ }
+
+ @Override
+ public void preLockRow(ObserverContext<RegionCoprocessorEnvironment> ctx, byte[] regionName,
+ byte[] row) throws IOException {
+ requirePermission("lockRow", getTableName(ctx.getEnvironment()), null, null,
+ Permission.Action.WRITE, Permission.Action.CREATE);
+ }
+
+ @Override
+ public void preUnlockRow(ObserverContext<RegionCoprocessorEnvironment> ctx, byte[] regionName,
+ long lockId) throws IOException {
+ requirePermission("unlockRow", getTableName(ctx.getEnvironment()), null, null,
+ Permission.Action.WRITE, Permission.Action.CREATE);
+ }
+
+  private void isSystemOrSuperUser(Configuration conf) throws IOException {
+    User user = User.getCurrent(); // the RegionServer process user
+    if (user == null) {
+      throw new IOException("Unable to obtain the current user, "
+          + "authorization checks for internal operations will not work correctly!");
+    }
+
+    String currentUser = user.getShortName();
+    List<String> superusers = Lists.asList(currentUser,
+        conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
+
+    User activeUser = getActiveUser(); // the user making this request
+    if (!(superusers.contains(activeUser.getShortName()))) {
+      throw new AccessDeniedException("User '" + activeUser.getShortName()
+          + "' is not system or super user.");
+    }
+  }
+
+  private boolean isSpecialTable(HRegionInfo regionInfo) {
+    byte[] tableName = regionInfo.getTableName();
+    return Bytes.equals(tableName, AccessControlLists.ACL_TABLE_NAME) // byte[] needs Bytes.equals; Object.equals is a reference compare
+        || Bytes.equals(tableName, Bytes.toBytes("-ROOT-"))
+        || Bytes.equals(tableName, Bytes.toBytes(".META."));
+  }
+
+ @Override
+ public void preStopRegionServer(ObserverContext<RegionServerCoprocessorEnvironment> env)
+ throws IOException {
+ requirePermission("stop", Permission.Action.ADMIN);
+ }
}
Modified: hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java (original)
+++ hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java Sun Dec 23 20:54:12 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.ipc.SecureRpcEngine;
import org.apache.hadoop.hbase.security.User;
@@ -32,8 +33,9 @@ public class SecureTestUtil {
conf.set("hadoop.security.authorization", "false");
conf.set("hadoop.security.authentication", "simple");
conf.set("hbase.rpc.engine", SecureRpcEngine.class.getName());
- conf.set("hbase.coprocessor.master.classes", AccessController.class.getName());
- conf.set("hbase.coprocessor.region.classes", AccessController.class.getName());
+ conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+ conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+ conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
// add the process running user to superusers
String currentUser = User.getCurrent().getName();
conf.set("hbase.superuser", "admin,"+currentUser);
Modified: hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Sun Dec 23 20:54:12 2012
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.HServerAd
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.UnknownRowLockException;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
@@ -51,9 +52,11 @@ import org.apache.hadoop.hbase.coprocess
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
+import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.Permission.Action;
@@ -79,6 +82,8 @@ public class TestAccessController {
private static User USER_ADMIN;
// user with rw permissions
private static User USER_RW;
+ // user with rw permissions on table.
+ private static User USER_RW_ON_TABLE;
// user with read-only permissions
private static User USER_RO;
// user is table owner. will have all permissions on table
@@ -93,6 +98,7 @@ public class TestAccessController {
private static MasterCoprocessorEnvironment CP_ENV;
private static RegionCoprocessorEnvironment RCP_ENV;
+ private static RegionServerCoprocessorEnvironment RSCP_ENV;
private static AccessController ACCESS_CONTROLLER;
@BeforeClass
@@ -107,6 +113,10 @@ public class TestAccessController {
ACCESS_CONTROLLER = (AccessController) cpHost.findCoprocessor(AccessController.class.getName());
CP_ENV = cpHost.createEnvironment(AccessController.class, ACCESS_CONTROLLER,
Coprocessor.PRIORITY_HIGHEST, 1, conf);
+ RegionServerCoprocessorHost rsHost = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0)
+ .getCoprocessorHost();
+ RSCP_ENV = rsHost.createEnvironment(AccessController.class, ACCESS_CONTROLLER,
+ Coprocessor.PRIORITY_HIGHEST, 1, conf);
// Wait for the ACL table to become available
TEST_UTIL.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 5000);
@@ -116,6 +126,7 @@ public class TestAccessController {
USER_ADMIN = User.createUserForTesting(conf, "admin2", new String[0]);
USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]);
USER_RO = User.createUserForTesting(conf, "rouser", new String[0]);
+ USER_RW_ON_TABLE = User.createUserForTesting(conf, "rwuser_1", new String[0]);
USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
USER_CREATE = User.createUserForTesting(conf, "tbl_create", new String[0]);
USER_NONE = User.createUserForTesting(conf, "nouser", new String[0]);
@@ -148,6 +159,9 @@ public class TestAccessController {
protocol.grant(new UserPermission(Bytes.toBytes(USER_CREATE.getShortName()), TEST_TABLE, null,
Permission.Action.CREATE));
+
+ protocol.grant(new UserPermission(Bytes.toBytes(USER_RW_ON_TABLE.getShortName()), TEST_TABLE,
+ null, Permission.Action.READ, Permission.Action.WRITE));
}
@AfterClass
@@ -161,6 +175,8 @@ public class TestAccessController {
user.runAs(action);
} catch (AccessDeniedException ade) {
fail("Expected action to pass for user '" + user.getShortName() + "' but was denied");
+ } catch (UnknownRowLockException exp){
+ //expected
}
}
}
@@ -1271,4 +1287,70 @@ public class TestAccessController {
}
}
+
+ @Test
+ public void testLockAction() throws Exception {
+ PrivilegedExceptionAction lockAction = new PrivilegedExceptionAction() {
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preLockRow(ObserverContext.createAndPrepare(RCP_ENV, null), null,
+ Bytes.toBytes("random_row"));
+ return null;
+ }
+ };
+ verifyAllowed(lockAction, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_RW_ON_TABLE);
+ verifyDenied(lockAction, USER_RO, USER_RW, USER_NONE);
+ }
+
+ @Test
+ public void testUnLockAction() throws Exception {
+ PrivilegedExceptionAction unLockAction = new PrivilegedExceptionAction() {
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preUnlockRow(ObserverContext.createAndPrepare(RCP_ENV, null), null,
+ 123456);
+ return null;
+ }
+ };
+ verifyAllowed(unLockAction, SUPERUSER, USER_ADMIN, USER_OWNER, USER_RW_ON_TABLE);
+ verifyDenied(unLockAction, USER_NONE, USER_RO, USER_RW);
+ }
+
+ @Test
+ public void testStopRegionServer() throws Exception {
+ PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preStopRegionServer(ObserverContext.createAndPrepare(RSCP_ENV, null));
+ return null;
+ }
+ };
+
+ verifyAllowed(action, SUPERUSER, USER_ADMIN);
+ verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
+ }
+
+ @Test
+ public void testOpenRegion() throws Exception {
+ PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preOpen(ObserverContext.createAndPrepare(RCP_ENV, null));
+ return null;
+ }
+ };
+
+ verifyAllowed(action, SUPERUSER, USER_ADMIN);
+ verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+ }
+
+ @Test
+ public void testCloseRegion() throws Exception {
+ PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preClose(ObserverContext.createAndPrepare(RCP_ENV, null), false);
+ return null;
+ }
+ };
+
+ verifyAllowed(action, SUPERUSER, USER_ADMIN);
+ verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+ }
+
}
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java Sun Dec 23 20:54:12 2012
@@ -157,6 +157,9 @@ public final class HConstants {
/** Default value for ZooKeeper session timeout */
public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;
+ /** Configuration key for whether to use ZK.multi */
+ public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";
+
/** Parameter name for port region server listens on. */
public static final String REGIONSERVER_PORT = "hbase.regionserver.port";
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java Sun Dec 23 20:54:12 2012
@@ -60,30 +60,14 @@ implements WritableComparable<HServerLoa
private int maxHeapMB = 0;
// Regionserver-level coprocessors, e.g., WALObserver implementations.
- // Region-level coprocessors, on the other hand, are stored inside RegionLoad
- // objects.
- private Set<String> coprocessors =
- new TreeSet<String>();
+ private Set<String> coprocessors = new TreeSet<String>();
/**
* HBASE-4070: Improve region server metrics to report loaded coprocessors.
- *
- * @return Returns the set of all coprocessors on this
- * regionserver, where this set is the union of the
- * regionserver-level coprocessors on one hand, and all of the region-level
- * coprocessors, on the other.
- *
- * We must iterate through all regions loaded on this regionserver to
- * obtain all of the region-level coprocessors.
+ * @return the set of all the server-wide coprocessors on this regionserver
*/
- public String[] getCoprocessors() {
- TreeSet<String> returnValue = new TreeSet<String>(coprocessors);
- for (Map.Entry<byte[], RegionLoad> rls: getRegionsLoad().entrySet()) {
- for (String coprocessor: rls.getValue().getCoprocessors()) {
- returnValue.add(coprocessor);
- }
- }
- return returnValue.toArray(new String[0]);
+ public String[] getRsCoprocessors() {
+ return coprocessors.toArray(new String[0]);
}
/** per-region load metrics */
@@ -145,10 +129,6 @@ implements WritableComparable<HServerLoa
*/
private int totalStaticBloomSizeKB;
- // Region-level coprocessors.
- Set<String> coprocessors =
- new TreeSet<String>();
-
/**
* Constructor, for Writable
*/
@@ -168,7 +148,6 @@ implements WritableComparable<HServerLoa
* @param writeRequestsCount
* @param totalCompactingKVs
* @param currentCompactedKVs
- * @param coprocessors
*/
public RegionLoad(final byte[] name, final int stores,
final int storefiles, final int storeUncompressedSizeMB,
@@ -177,8 +156,7 @@ implements WritableComparable<HServerLoa
final int rootIndexSizeKB, final int totalStaticIndexSizeKB,
final int totalStaticBloomSizeKB,
final long readRequestsCount, final long writeRequestsCount,
- final long totalCompactingKVs, final long currentCompactedKVs,
- final Set<String> coprocessors) {
+ final long totalCompactingKVs, final long currentCompactedKVs) {
this.name = name;
this.stores = stores;
this.storefiles = storefiles;
@@ -193,12 +171,6 @@ implements WritableComparable<HServerLoa
this.writeRequestsCount = writeRequestsCount;
this.totalCompactingKVs = totalCompactingKVs;
this.currentCompactedKVs = currentCompactedKVs;
- this.coprocessors = coprocessors;
- }
-
- // Getters
- private String[] getCoprocessors() {
- return coprocessors.toArray(new String[0]);
}
/**
@@ -400,9 +372,9 @@ implements WritableComparable<HServerLoa
this.totalCompactingKVs = in.readLong();
this.currentCompactedKVs = in.readLong();
int coprocessorsSize = in.readInt();
- coprocessors = new TreeSet<String>();
+ // Backward compatibility - there may be coprocessors in the region load, ignore them.
for (int i = 0; i < coprocessorsSize; i++) {
- coprocessors.add(in.readUTF());
+ in.readUTF();
}
}
@@ -431,9 +403,9 @@ implements WritableComparable<HServerLoa
this.totalCompactingKVs = WritableUtils.readVLong(in);
this.currentCompactedKVs = WritableUtils.readVLong(in);
int coprocessorsSize = WritableUtils.readVInt(in);
- coprocessors = new TreeSet<String>();
+ // Backward compatibility - there may be coprocessors in the region load, ignore them.
for (int i = 0; i < coprocessorsSize; i++) {
- coprocessors.add(in.readUTF());
+ in.readUTF();
}
}
@@ -454,10 +426,9 @@ implements WritableComparable<HServerLoa
WritableUtils.writeVInt(out, totalStaticBloomSizeKB);
WritableUtils.writeVLong(out, totalCompactingKVs);
WritableUtils.writeVLong(out, currentCompactedKVs);
- WritableUtils.writeVInt(out, coprocessors.size());
- for (String coprocessor: coprocessors) {
- out.writeUTF(coprocessor);
- }
+ // Backward compatibility - write out 0 as coprocessor count,
+ // we don't report region-level coprocessors anymore.
+ WritableUtils.writeVInt(out, 0);
}
/**
@@ -503,11 +474,6 @@ implements WritableComparable<HServerLoa
}
sb = Strings.appendKeyValue(sb, "compactionProgressPct",
compactionProgressPct);
- String coprocessors = Arrays.toString(getCoprocessors());
- if (coprocessors != null) {
- sb = Strings.appendKeyValue(sb, "coprocessors",
- Arrays.toString(getCoprocessors()));
- }
return sb.toString();
}
}
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java Sun Dec 23 20:54:12 2012
@@ -216,9 +216,6 @@ public class KeyValue implements Writabl
private int offset = 0;
private int length = 0;
- // the row cached
- private volatile byte [] rowCache = null;
-
/**
* @return True if a delete type, a {@link KeyValue.Type#Delete} or
* a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
@@ -987,7 +984,6 @@ public class KeyValue implements Writabl
int tsOffset = getTimestampOffset();
System.arraycopy(now, 0, this.bytes, tsOffset, Bytes.SIZEOF_LONG);
// clear cache or else getTimestamp() possibly returns an old value
- timestampCache = -1L;
return true;
}
return false;
@@ -1037,28 +1033,19 @@ public class KeyValue implements Writabl
* @return Row in a new byte array.
*/
public byte [] getRow() {
- if (rowCache == null) {
- int o = getRowOffset();
- short l = getRowLength();
- // initialize and copy the data into a local variable
- // in case multiple threads race here.
- byte local[] = new byte[l];
- System.arraycopy(getBuffer(), o, local, 0, l);
- rowCache = local; // volatile assign
- }
- return rowCache;
+ int o = getRowOffset();
+ short l = getRowLength();
+ byte result[] = new byte[l];
+ System.arraycopy(getBuffer(), o, result, 0, l);
+ return result;
}
/**
*
* @return Timestamp
*/
- private long timestampCache = -1;
public long getTimestamp() {
- if (timestampCache == -1) {
- timestampCache = getTimestamp(getKeyLength());
- }
- return timestampCache;
+ return getTimestamp(getKeyLength());
}
/**
@@ -2260,21 +2247,17 @@ public class KeyValue implements Writabl
// HeapSize
public long heapSize() {
- return ClassSize.align(ClassSize.OBJECT + (2 * ClassSize.REFERENCE) +
- ClassSize.align(ClassSize.ARRAY) + ClassSize.align(length) +
- (3 * Bytes.SIZEOF_INT) +
- ClassSize.align(ClassSize.ARRAY) +
- (2 * Bytes.SIZEOF_LONG));
+ return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE
+ + ClassSize.align(ClassSize.ARRAY) + ClassSize.align(length)
+ + (3 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_LONG);
}
// this overload assumes that the length bytes have already been read,
// and it expects the length of the KeyValue to be explicitly passed
// to it.
public void readFields(int length, final DataInput in) throws IOException {
- this.rowCache = null;
this.length = length;
this.offset = 0;
- this.timestampCache = -1;
this.keyLength = 0;
this.bytes = new byte[this.length];
in.readFully(this.bytes, 0, this.length);
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java Sun Dec 23 20:54:12 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase;
/**
* Thrown if a region server is passed an unknown row lock id
+ * @deprecated row locks are deprecated (and thus so our associated exceptions).
*/
public class UnknownRowLockException extends DoNotRetryIOException {
private static final long serialVersionUID = 993179627856392526L;
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java Sun Dec 23 20:54:12 2012
@@ -147,6 +147,38 @@ public class HFileArchiver {
}
/**
+ * Remove from the specified region the store files of the specified column family,
+ * either by archiving them or outright deletion
+ * @param fs the filesystem where the store files live
+ * @param conf {@link Configuration} to examine to determine the archive directory
+ * @param parent Parent region hosting the store files
+ * @param tableDir {@link Path} to where the table is being stored (for building the archive path)
+ * @param family the family hosting the store files
+ * @throws IOException if the files could not be correctly disposed.
+ */
+ public static void archiveFamily(FileSystem fs, Configuration conf,
+ HRegionInfo parent, Path tableDir, byte[] family) throws IOException {
+ Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));
+ FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir, null);
+ if (storeFiles == null) {
+ LOG.debug("No store files to dispose for region=" + parent.getRegionNameAsString() +
+ ", family=" + Bytes.toString(family));
+ return;
+ }
+
+ FileStatusConverter getAsFile = new FileStatusConverter(fs);
+ Collection<File> toArchive = Lists.transform(Arrays.asList(storeFiles), getAsFile);
+ Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, tableDir, family);
+
+ // do the actual archive
+ if (!resolveAndArchive(fs, storeArchiveDir, toArchive)) {
+ throw new IOException("Failed to archive/delete all the files for region:"
+ + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)
+ + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");
+ }
+ }
+
+ /**
* Remove the store files, either by archiving them or outright deletion
* @param fs the filesystem where the store files live
* @param parent Parent region hosting the store files
@@ -196,7 +228,7 @@ public class HFileArchiver {
if (!resolveAndArchive(fs, storeArchiveDir, storeFiles)) {
throw new IOException("Failed to archive/delete all the files for region:"
+ Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)
- + " into " + storeArchiveDir + "Something is probably arwy on the filesystem.");
+ + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");
}
}
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java Sun Dec 23 20:54:12 2012
@@ -432,7 +432,6 @@ public class MetaReader {
return true;
}
if (!isInsideTable(this.current, tableNameBytes)) return false;
- if (this.current.isSplitParent()) return true;
// Else call super and add this Result to the collection.
super.visit(r);
// Stop collecting regions from table after we get one.
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java Sun Dec 23 20:54:12 2012
@@ -86,6 +86,25 @@ public class Delete extends Mutation
}
/**
+ * Create a Delete operation for the specified row and timestamp.<p>
+ *
+ * If no further operations are done, this will delete all columns in all
+ * families of the specified row with a timestamp less than or equal to the
+ * specified timestamp.<p>
+ *
+ * This timestamp is ONLY used for a delete row operation. If specifying
+ * families or columns, you must specify each timestamp individually.
+ * @param row row key
+ * @param timestamp maximum version timestamp (only for delete row)
+ */
+ public Delete(byte [] row, long timestamp) {
+ this.row = row;
+ this.ts = timestamp;
+ }
+
+ /**
* Create a Delete operation for the specified row and timestamp, using
* an optional row lock.<p>
*
@@ -98,6 +117,7 @@ public class Delete extends Mutation
* @param row row key
* @param timestamp maximum version timestamp (only for delete row)
* @param rowLock previously acquired row lock, or null
+ * @deprecated {@link RowLock} is deprecated, use {@link #Delete(byte[], long)}.
*/
public Delete(byte [] row, long timestamp, RowLock rowLock) {
this.row = row;
Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java?rev=1425525&r1=1425524&r2=1425525&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java Sun Dec 23 20:54:12 2012
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
@@ -97,6 +98,7 @@ public class Get extends OperationWithAt
* all columns in all families of the specified row.
* @param row row key
* @param rowLock previously acquired row lock, or null
+ * @deprecated {@link RowLock} is deprecated, use {@link #Get(byte[])}.
*/
public Get(byte [] row, RowLock rowLock) {
this.row = row;
@@ -131,6 +133,9 @@ public class Get extends OperationWithAt
if(set == null) {
set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
}
+ if (qualifier == null) {
+ qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ }
set.add(qualifier);
familyMap.put(family, set);
return this;