Posted to commits@hbase.apache.org by la...@apache.org on 2012/12/23 20:34:56 UTC

svn commit: r1425513 [1/7] - in /hbase/branches/0.94-test: ./ bin/ conf/ security/src/main/java/org/apache/hadoop/hbase/ipc/ security/src/main/java/org/apache/hadoop/hbase/security/access/ security/src/test/java/org/apache/hadoop/hbase/security/access/...

Author: larsh
Date: Sun Dec 23 19:34:53 2012
New Revision: 1425513

URL: http://svn.apache.org/viewvc?rev=1425513&view=rev
Log:
rolling back to r1410200 - shortly after 0.94.3

Removed:
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationHLogReaderManager.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClassTestFinder.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ClusterManager.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IngestIntegrationTestBase.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTests.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/Mocking.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/handler/
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionBusyWait.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplitCompressed.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithCompression.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/StoppableImplementation.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
Modified:
    hbase/branches/0.94-test/bin/hbase
    hbase/branches/0.94-test/conf/hbase-env.sh
    hbase/branches/0.94-test/conf/log4j.properties
    hbase/branches/0.94-test/pom.xml
    hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java
    hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java
    hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
    hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
    hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Increment.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Put.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Result.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/RowLock.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Scan.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorClassLoader.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/EnableTableHandler.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/security/User.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/util/Threads.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java
    hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
    hbase/branches/0.94-test/src/main/resources/hbase-default.xml
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/LargeTests.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/MediumTests.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/SmallTests.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestDrainingServer.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/ipc/TestPBOnWritableRpc.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMXBean.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestExactCounterMetric.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestExponentiallyDecayingSample.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsHistogram.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestMXBean.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenMasterInitializing.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/replication/TestReplication.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/TestSizeBasedThrottler.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTable.java
    hbase/branches/0.94-test/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTableReadOnly.java

Modified: hbase/branches/0.94-test/bin/hbase
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/bin/hbase?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/bin/hbase (original)
+++ hbase/branches/0.94-test/bin/hbase Sun Dec 23 19:34:53 2012
@@ -248,21 +248,6 @@ fi
 # restore ordinary behaviour
 unset IFS
 
-#Set the right GC options based on the what we are running
-declare -a client_cmds=("shell" "hbck" "hlog" "hfile" "zkcli")
-for cmd in $client_cmds; do
-	if [[ $cmd == $COMMAND ]]; then
-		client=true
-		break
-	fi
-done
-
-if [[ $client ]]; then
-	HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
-else
-	HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
-fi
-
 # figure out which class to run
 if [ "$COMMAND" = "shell" ] ; then
   # eg export JRUBY_HOME=/usr/local/share/jruby

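For reference, a standalone bash sketch (not part of this commit) of the per-command GC-option selection that the hunk above removes. One hedged adjustment: the array is expanded as "${client_cmds[@]}" so every listed command is checked, whereas the removed block's bare $client_cmds only expands to the first element.

    #!/usr/bin/env bash
    # Pick client or server GC options based on the hbase subcommand being run.
    COMMAND="$1"
    client_cmds=("shell" "hbck" "hlog" "hfile" "zkcli")

    client=""
    for cmd in "${client_cmds[@]}"; do    # check every entry, not only the first
      if [[ "$cmd" == "$COMMAND" ]]; then
        client=true
        break
      fi
    done

    if [[ -n "$client" ]]; then
      HBASE_OPTS="$HBASE_OPTS $CLIENT_GC_OPTS"
    else
      HBASE_OPTS="$HBASE_OPTS $SERVER_GC_OPTS"
    fi
    echo "Resulting HBASE_OPTS: $HBASE_OPTS"
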
Modified: hbase/branches/0.94-test/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/conf/hbase-env.sh?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/conf/hbase-env.sh (original)
+++ hbase/branches/0.94-test/conf/hbase-env.sh Sun Dec 23 19:34:53 2012
@@ -21,10 +21,6 @@
 
 # Set environment variables here.
 
-# This script sets variables multiple times over the course of starting an hbase process,
-# so try to keep things idempotent unless you want to take an even deeper look
-# into the startup scripts (bin/hbase, etc.)
-
 # The java implementation to use.  Java 1.6 required.
 # export JAVA_HOME=/usr/java/jdk1.6.0/
 
@@ -38,20 +34,12 @@
 # Below are what we set by default.  May only work with SUN JVM.
 # For more on why as well as other possible settings,
 # see http://wiki.apache.org/hadoop/PerformanceTuning
-export HBASE_OPTS="-XX:+UseConcMarkSweepGC"
-
-# Uncomment below to enable java garbage collection logging for the server-side processes
-# this enables basic gc logging for the server processes to the .out file
-# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"
-
-# this enables gc logging using automatic GC log rolling. Only applies to jdk 1.6.0_34+ and 1.7.0_2+. Either use this set of options or the one above
-# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=1 -XX:GCLogFileSize=512M $HBASE_GC_OPTS"
+export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC"
 
-# Uncomment below to enable java garbage collection logging for the client processes in the .out file.
-# export CLIENT_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"
+# Uncomment below to enable java garbage collection logging in the .out file.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS" 
 
-# Uncomment below (along with above GC logging) to put GC information in its own logfile (will set HBASE_GC_OPTS).
-# This applies to both the server and client GC options above
+# Uncomment below (along with above GC logging) to put GC information in its own logfile (will set HBASE_GC_OPTS)
 # export HBASE_USE_GC_LOGFILE=true
 
 
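Pieced together from the hunk's post-rollback ('+' and context) lines, a minimal hbase-env.sh fragment showing the single-variable GC setup this rollback restores; the GC-logging lines stay commented out by default.

    # Post-rollback style: one HBASE_OPTS variable, no CLIENT/SERVER GC split.
    export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC"

    # Uncomment to log GC activity to the .out file:
    # export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps $HBASE_GC_OPTS"

    # Uncomment (along with the line above) to put GC information in its own
    # logfile (will set HBASE_GC_OPTS):
    # export HBASE_USE_GC_LOGFILE=true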

Modified: hbase/branches/0.94-test/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/conf/log4j.properties?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/conf/log4j.properties (original)
+++ hbase/branches/0.94-test/conf/log4j.properties Sun Dec 23 19:34:53 2012
@@ -39,7 +39,6 @@ log4j.appender.DRFAS.layout=org.apache.l
 log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 log4j.category.SecurityLogger=${hbase.security.logger}
 log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
 
 #
 # Null Appender

Modified: hbase/branches/0.94-test/pom.xml
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/pom.xml?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/pom.xml (original)
+++ hbase/branches/0.94-test/pom.xml Sun Dec 23 19:34:53 2012
@@ -36,7 +36,7 @@
   <groupId>org.apache.hbase</groupId>
   <artifactId>hbase</artifactId>
   <packaging>jar</packaging>
-  <version>0.94.4-SNAPSHOT</version>
+  <version>0.94.3</version>
   <name>HBase</name>
   <description>
     HBase is the &amp;lt;a href="http://hadoop.apache.org"&amp;rt;Hadoop&lt;/a&amp;rt; database. Use it when you need
@@ -401,8 +401,9 @@
               <include>${integrationtest.include}</include>
             </includes>
             <excludes>
-              <exclude>${unittest.include}</exclude>
+              <exlude>${unittest.include}</exlude>
               <exclude>**/*$*</exclude>
+              <exclude>${test.exclude.pattern}</exclude>
             </excludes>
             <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
             <environmentVariables>
@@ -763,12 +764,6 @@
           <configuration>
             <skip>false</skip>
             <forkMode>always</forkMode>
-            <!-- TODO: failsafe does timeout, but verify does not fail the build because of the timeout.
-                 I believe it is a failsafe bug, we may consider using surefire -->
-            <forkedProcessTimeoutInSeconds>1800</forkedProcessTimeoutInSeconds>
-            <argLine>-enableassertions -Xmx1900m
-              -Djava.security.egd=file:/dev/./urandom</argLine>
-            <testFailureIgnore>false</testFailureIgnore>
           </configuration>
       </plugin>
       <plugin>
@@ -1017,9 +1012,8 @@
     <protobuf.version>2.4.0a</protobuf.version>
     <stax-api.version>1.0.1</stax-api.version>
     <thrift.version>0.8.0</thrift.version>
-    <zookeeper.version>3.4.5</zookeeper.version>
+    <zookeeper.version>3.4.3</zookeeper.version>
     <hadoop-snappy.version>0.0.1-SNAPSHOT</hadoop-snappy.version>
-    <clover.version>2.6.3</clover.version>
 
     <package.prefix>/usr</package.prefix>
     <package.conf.dir>/etc/hbase</package.conf.dir>
@@ -1715,7 +1709,7 @@
         </property>
       </activation>
       <properties>
-        <hadoop.version>1.1.1</hadoop.version>
+        <hadoop.version>1.1.0</hadoop.version>
         <slf4j.version>1.4.3</slf4j.version>
       </properties>
       <dependencies>
@@ -2358,61 +2352,6 @@
         <surefire.firstPartGroups></surefire.firstPartGroups>
       </properties>
     </profile>
-
-    <!-- Profile for running clover. You need to have a clover license under ~/.clover.license for ${clover.version}
-or you can provide the license with -Dmaven.clover.licenseLocation=/path/to/license. Committers can find
-the license under https://svn.apache.org/repos/private/committers/donated-licenses/clover/
-Note that clover 2.6.3 does not run with maven 3, so you have to use maven2. The report will be generated
-under target/site/clover/index.html when you run
-MAVEN_OPTS=-Xmx2048m mvn clean test -Pclover site -->
-    <profile>
-      <id>clover</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-        <property>
-          <name>clover</name>
-        </property>
-      </activation>
-      <properties>
-        <maven.clover.licenseLocation>${user.home}/.clover.license</maven.clover.licenseLocation>
-        <clover.version>2.6.3</clover.version>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>com.atlassian.maven.plugins</groupId>
-            <artifactId>maven-clover2-plugin</artifactId>
-            <version>${clover.version}</version>
-            <configuration>
-              <includesAllSourceRoots>true</includesAllSourceRoots>
-              <includesTestSourceRoots>true</includesTestSourceRoots>
-              <targetPercentage>50%</targetPercentage>
-              <generateHtml>true</generateHtml>
-              <generateXml>true</generateXml>
-              <excludes>
-                <exclude>**/generated/**</exclude>
-              </excludes>
-            </configuration>
-            <executions>
-              <execution>
-                <id>clover-setup</id>
-                <phase>process-sources</phase>
-                <goals>
-                  <goal>setup</goal>
-                </goals>
-              </execution>
-              <execution>
-                <id>clover</id>
-                <phase>site</phase>
-                <goals>
-                  <goal>clover</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
   </profiles>
 
   <!-- See http://jira.codehaus.org/browse/MSITE-443 why the settings need to be here and not in pluginManagement. -->

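For reference, the Clover invocation described inside the profile that this rollback removes, written out as a shell sketch; it only applies to a tree that still carries the profile.

    # Requires a Clover license at ~/.clover.license, or pass
    # -Dmaven.clover.licenseLocation=/path/to/license. Per the profile's notes,
    # Clover 2.6.3 needs Maven 2 rather than Maven 3.
    MAVEN_OPTS=-Xmx2048m mvn clean test -Pclover site
    # The report is generated under target/site/clover/index.html
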
Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java Sun Dec 23 19:34:53 2012
@@ -98,7 +98,7 @@ public class SecureClient extends HBaseC
 
       User ticket = remoteId.getTicket();
       Class<?> protocol = remoteId.getProtocol();
-      this.useSasl = User.isHBaseSecurityEnabled(conf);
+      this.useSasl = User.isSecurityEnabled();
       if (useSasl && protocol != null) {
         TokenInfo tokenInfo = protocol.getAnnotation(TokenInfo.class);
         if (tokenInfo != null) {

Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java Sun Dec 23 19:34:53 2012
@@ -684,7 +684,7 @@ public abstract class SecureServer exten
         conf, serverName, highPriorityLevel);
     this.authorize =
       conf.getBoolean(HADOOP_SECURITY_AUTHORIZATION, false);
-    this.isSecurityEnabled = User.isHBaseSecurityEnabled(this.conf);
+    this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
 
     if (isSecurityEnabled) {
       HBaseSaslRpcServer.init(conf);

Modified: hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java (original)
+++ hbase/branches/0.94-test/security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java Sun Dec 23 19:34:53 2012
@@ -46,8 +46,6 @@ import org.apache.hadoop.hbase.coprocess
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
@@ -64,7 +62,6 @@ import org.apache.hadoop.hbase.security.
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
@@ -104,7 +101,7 @@ import com.google.common.collect.Sets;
  * </p>
  */
 public class AccessController extends BaseRegionObserver
-    implements MasterObserver, RegionServerObserver, AccessControllerProtocol {
+    implements MasterObserver, AccessControllerProtocol {
   /**
    * Represents the result of an authorization check for logging and error
    * reporting.
@@ -115,14 +112,12 @@ public class AccessController extends Ba
     private final byte[] family;
     private final byte[] qualifier;
     private final Permission.Action action;
-    private final String request;
     private final String reason;
     private final User user;
 
-    public AuthResult(boolean allowed, String request, String reason,  User user,
+    public AuthResult(boolean allowed, String reason,  User user,
         Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
       this.allowed = allowed;
-      this.request = request;
       this.reason = reason;
       this.user = user;
       this.table = table;
@@ -137,8 +132,6 @@ public class AccessController extends Ba
 
     public String getReason() { return reason; }
 
-    public String getRequest() { return request; }
-
     public String toContextString() {
       return "(user=" + (user != null ? user.getName() : "UNKNOWN") + ", " +
           "scope=" + (table == null ? "GLOBAL" : Bytes.toString(table)) + ", " +
@@ -152,23 +145,23 @@ public class AccessController extends Ba
           .append(toContextString()).toString();
     }
 
-    public static AuthResult allow(String request, String reason, User user, Permission.Action action,
+    public static AuthResult allow(String reason, User user, Permission.Action action,
         byte[] table, byte[] family, byte[] qualifier) {
-      return new AuthResult(true, request, reason, user, action, table, family, qualifier);
+      return new AuthResult(true, reason, user, action, table, family, qualifier);
     }
 
-    public static AuthResult allow(String request, String reason, User user, Permission.Action action, byte[] table) {
-      return new AuthResult(true, request, reason, user, action, table, null, null);
+    public static AuthResult allow(String reason, User user, Permission.Action action, byte[] table) {
+      return new AuthResult(true, reason, user, action, table, null, null);
     }
 
-    public static AuthResult deny(String request, String reason, User user,
+    public static AuthResult deny(String reason, User user,
         Permission.Action action, byte[] table) {
-      return new AuthResult(false, request, reason, user, action, table, null, null);
+      return new AuthResult(false, reason, user, action, table, null, null);
     }
 
-    public static AuthResult deny(String request, String reason, User user,
+    public static AuthResult deny(String reason, User user,
         Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
-      return new AuthResult(false, request, reason, user, action, table, family, qualifier);
+      return new AuthResult(false, reason, user, action, table, family, qualifier);
     }
   }
 
@@ -259,7 +252,7 @@ public class AccessController extends Ba
    * the request
    * @return
    */
-  AuthResult permissionGranted(String request, User user, TablePermission.Action permRequest,
+  AuthResult permissionGranted(User user, TablePermission.Action permRequest,
       RegionCoprocessorEnvironment e,
       Map<byte [], ? extends Collection<?>> families) {
     HRegionInfo hri = e.getRegion().getRegionInfo();
@@ -269,12 +262,12 @@ public class AccessController extends Ba
     // this is a very common operation, so deal with it quickly.
     if (hri.isRootRegion() || hri.isMetaRegion()) {
       if (permRequest == TablePermission.Action.READ) {
-        return AuthResult.allow(request, "All users allowed", user, permRequest, tableName);
+        return AuthResult.allow("All users allowed", user, permRequest, tableName);
       }
     }
 
     if (user == null) {
-      return AuthResult.deny(request, "No user associated with request!", null, permRequest, tableName);
+      return AuthResult.deny("No user associated with request!", null, permRequest, tableName);
     }
 
     // Users with CREATE/ADMIN rights need to modify .META. and _acl_ table
@@ -288,12 +281,12 @@ public class AccessController extends Ba
        (authManager.authorize(user, Permission.Action.CREATE) ||
         authManager.authorize(user, Permission.Action.ADMIN)))
     {
-       return AuthResult.allow(request, "Table permission granted", user, permRequest, tableName);
+       return AuthResult.allow("Table permission granted", user, permRequest, tableName);
     }
 
     // 2. check for the table-level, if successful we can short-circuit
     if (authManager.authorize(user, tableName, (byte[])null, permRequest)) {
-      return AuthResult.allow(request, "Table permission granted", user, permRequest, tableName);
+      return AuthResult.allow("Table permission granted", user, permRequest, tableName);
     }
 
     // 3. check permissions against the requested families
@@ -314,7 +307,7 @@ public class AccessController extends Ba
             for (byte[] qualifier : familySet) {
               if (!authManager.authorize(user, tableName, family.getKey(),
                                          qualifier, permRequest)) {
-                return AuthResult.deny(request, "Failed qualifier check", user,
+                return AuthResult.deny("Failed qualifier check", user,
                     permRequest, tableName, family.getKey(), qualifier);
               }
             }
@@ -323,25 +316,25 @@ public class AccessController extends Ba
             for (KeyValue kv : kvList) {
               if (!authManager.authorize(user, tableName, family.getKey(),
                       kv.getQualifier(), permRequest)) {
-                return AuthResult.deny(request, "Failed qualifier check", user,
+                return AuthResult.deny("Failed qualifier check", user,
                     permRequest, tableName, family.getKey(), kv.getQualifier());
               }
             }
           }
         } else {
           // no qualifiers and family-level check already failed
-          return AuthResult.deny(request, "Failed family check", user, permRequest,
+          return AuthResult.deny("Failed family check", user, permRequest,
               tableName, family.getKey(), null);
         }
       }
 
       // all family checks passed
-      return AuthResult.allow(request, "All family checks passed", user, permRequest,
+      return AuthResult.allow("All family checks passed", user, permRequest,
           tableName);
     }
 
     // 4. no families to check and table level access failed
-    return AuthResult.deny(request, "No families to check and table permission failed",
+    return AuthResult.deny("No families to check and table permission failed",
         user, permRequest, tableName);
   }
 
@@ -356,7 +349,6 @@ public class AccessController extends Ba
           " for user " + (result.getUser() != null ? result.getUser().getShortName() : "UNKNOWN") +
           "; reason: " + result.getReason() +
           "; remote address: " + (remoteAddr != null ? remoteAddr : "") +
-          "; request: " + result.getRequest() +
           "; context: " + result.toContextString());
     }
   }
@@ -385,20 +377,18 @@ public class AccessController extends Ba
    * @throws IOException if obtaining the current user fails
    * @throws AccessDeniedException if user has no authorization
    */
-  private void requirePermission(String request, byte[] tableName, byte[] family, byte[] qualifier,
+  private void requirePermission(byte[] tableName, byte[] family, byte[] qualifier,
       Action... permissions) throws IOException {
     User user = getActiveUser();
     AuthResult result = null;
 
     for (Action permission : permissions) {
       if (authManager.authorize(user, tableName, family, qualifier, permission)) {
-        result = AuthResult.allow(request, "Table permission granted", user,
-                                  permission, tableName, family, qualifier);
+        result = AuthResult.allow("Table permission granted", user, permission, tableName, family, qualifier);
         break;
       } else {
         // rest of the world
-        result = AuthResult.deny(request, "Insufficient permissions", user,
-                                 permission, tableName, family, qualifier);
+        result = AuthResult.deny("Insufficient permissions", user, permission, tableName, family, qualifier);
       }
     }
     logResult(result);
@@ -413,12 +403,12 @@ public class AccessController extends Ba
    * @throws IOException if obtaining the current user fails
    * @throws AccessDeniedException if authorization is denied
    */
-  private void requirePermission(String request, Permission.Action perm) throws IOException {
+  private void requirePermission(Permission.Action perm) throws IOException {
     User user = getActiveUser();
     if (authManager.authorize(user, perm)) {
-      logResult(AuthResult.allow(request, "Global check allowed", user, perm, null));
+      logResult(AuthResult.allow("Global check allowed", user, perm, null));
     } else {
-      logResult(AuthResult.deny(request, "Global check failed", user, perm, null));
+      logResult(AuthResult.deny("Global check failed", user, perm, null));
       throw new AccessDeniedException("Insufficient permissions for user '" +
           (user != null ? user.getShortName() : "null") +"' (global, action=" +
           perm.toString() + ")");
@@ -433,7 +423,7 @@ public class AccessController extends Ba
    * @param families The set of column families present/required in the request
    * @throws AccessDeniedException if the authorization check failed
    */
-  private void requirePermission(String request, Permission.Action perm,
+  private void requirePermission(Permission.Action perm,
         RegionCoprocessorEnvironment env, Collection<byte[]> families)
       throws IOException {
     // create a map of family-qualifier
@@ -441,7 +431,7 @@ public class AccessController extends Ba
     for (byte[] family : families) {
       familyMap.put(family, null);
     }
-    requirePermission(request, perm, env, familyMap);
+    requirePermission(perm, env, familyMap);
   }
 
   /**
@@ -452,12 +442,12 @@ public class AccessController extends Ba
    * @param families The map of column families-qualifiers.
    * @throws AccessDeniedException if the authorization check failed
    */
-  private void requirePermission(String request, Permission.Action perm,
+  private void requirePermission(Permission.Action perm,
         RegionCoprocessorEnvironment env,
         Map<byte[], ? extends Collection<?>> families)
       throws IOException {
     User user = getActiveUser();
-    AuthResult result = permissionGranted(request, user, perm, env, families);
+    AuthResult result = permissionGranted(user, perm, env, families);
     logResult(result);
 
     if (!result.isAllowed()) {
@@ -521,31 +511,17 @@ public class AccessController extends Ba
 
   /* ---- MasterObserver implementation ---- */
   public void start(CoprocessorEnvironment env) throws IOException {
-
-    ZooKeeperWatcher zk = null;
+    // if running on HMaster
     if (env instanceof MasterCoprocessorEnvironment) {
-      // if running on HMaster
-      MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env;
-      zk = mEnv.getMasterServices().getZooKeeper();      
-    } else if (env instanceof RegionServerCoprocessorEnvironment) {      
-      RegionServerCoprocessorEnvironment rsEnv = (RegionServerCoprocessorEnvironment) env;
-      zk = rsEnv.getRegionServerServices().getZooKeeper();      
-    } else if (env instanceof RegionCoprocessorEnvironment) {
-      // if running at region
-      regionEnv = (RegionCoprocessorEnvironment) env;
-      zk = regionEnv.getRegionServerServices().getZooKeeper();
+      MasterCoprocessorEnvironment e = (MasterCoprocessorEnvironment)env;
+      this.authManager = TableAuthManager.get(
+          e.getMasterServices().getZooKeeper(),
+          e.getConfiguration());
     }
 
-    // If zk is null or IOException while obtaining auth manager,
-    // throw RuntimeException so that the coprocessor is unloaded.
-    if (zk != null) {
-      try {
-        this.authManager = TableAuthManager.get(zk, env.getConfiguration());
-      } catch (IOException ioe) {
-        throw new RuntimeException("Error obtaining TableAuthManager", ioe);
-      }
-    } else {
-      throw new RuntimeException("Error obtaining TableAuthManager, zk found null.");
+    // if running at region
+    if (env instanceof RegionCoprocessorEnvironment) {
+      regionEnv = (RegionCoprocessorEnvironment)env;
     }
   }
 
@@ -556,7 +532,7 @@ public class AccessController extends Ba
   @Override
   public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> c,
       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
-    requirePermission("createTable", Permission.Action.CREATE);
+    requirePermission(Permission.Action.CREATE);
   }
 
   @Override
@@ -575,7 +551,7 @@ public class AccessController extends Ba
   @Override
   public void preDeleteTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
       throws IOException {
-   requirePermission("deleteTable", tableName, null, null, Action.ADMIN, Action.CREATE);
+   requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -587,7 +563,7 @@ public class AccessController extends Ba
   @Override
   public void preModifyTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
       HTableDescriptor htd) throws IOException {
-    requirePermission("modifyTable", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -604,7 +580,7 @@ public class AccessController extends Ba
   @Override
   public void preAddColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
       HColumnDescriptor column) throws IOException {
-    requirePermission("addColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -614,7 +590,7 @@ public class AccessController extends Ba
   @Override
   public void preModifyColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
       HColumnDescriptor descriptor) throws IOException {
-    requirePermission("modifyColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -624,7 +600,7 @@ public class AccessController extends Ba
   @Override
   public void preDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
       byte[] col) throws IOException {
-    requirePermission("deleteColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -637,7 +613,7 @@ public class AccessController extends Ba
   @Override
   public void preEnableTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
       throws IOException {
-    requirePermission("enableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -651,7 +627,7 @@ public class AccessController extends Ba
       throw new AccessDeniedException("Not allowed to disable "
           + AccessControlLists.ACL_TABLE_NAME_STR + " table.");
     }
-    requirePermission("disableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
+    requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
   }
 
   @Override
@@ -661,7 +637,7 @@ public class AccessController extends Ba
   @Override
   public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
       ServerName srcServer, ServerName destServer) throws IOException {
-    requirePermission("move", region.getTableName(), null, null, Action.ADMIN);
+    requirePermission(region.getTableName(), null, null, Action.ADMIN);
   }
 
   @Override
@@ -672,7 +648,7 @@ public class AccessController extends Ba
   @Override
   public void preAssign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo)
       throws IOException {
-    requirePermission("assign", regionInfo.getTableName(), null, null, Action.ADMIN);
+    requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
   }
 
   @Override
@@ -682,7 +658,7 @@ public class AccessController extends Ba
   @Override
   public void preUnassign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo,
       boolean force) throws IOException {
-    requirePermission("unassign", regionInfo.getTableName(), null, null, Action.ADMIN);
+    requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
   }
 
   @Override
@@ -692,7 +668,7 @@ public class AccessController extends Ba
   @Override
   public void preBalance(ObserverContext<MasterCoprocessorEnvironment> c)
       throws IOException {
-    requirePermission("balance", Permission.Action.ADMIN);
+    requirePermission(Permission.Action.ADMIN);
   }
   @Override
   public void postBalance(ObserverContext<MasterCoprocessorEnvironment> c)
@@ -701,7 +677,7 @@ public class AccessController extends Ba
   @Override
   public boolean preBalanceSwitch(ObserverContext<MasterCoprocessorEnvironment> c,
       boolean newValue) throws IOException {
-    requirePermission("balanceSwitch", Permission.Action.ADMIN);
+    requirePermission(Permission.Action.ADMIN);
     return newValue;
   }
   @Override
@@ -711,13 +687,13 @@ public class AccessController extends Ba
   @Override
   public void preShutdown(ObserverContext<MasterCoprocessorEnvironment> c)
       throws IOException {
-    requirePermission("shutdown", Permission.Action.ADMIN);
+    requirePermission(Permission.Action.ADMIN);
   }
 
   @Override
   public void preStopMaster(ObserverContext<MasterCoprocessorEnvironment> c)
       throws IOException {
-    requirePermission("stopMaster", Permission.Action.ADMIN);
+    requirePermission(Permission.Action.ADMIN);
   }
 
   @Override
@@ -731,34 +707,27 @@ public class AccessController extends Ba
   /* ---- RegionObserver implementation ---- */
 
   @Override
-  public void preOpen(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
-    RegionCoprocessorEnvironment env = e.getEnvironment();
-    final HRegion region = env.getRegion();
-    if (region == null) {
-      LOG.error("NULL region from RegionCoprocessorEnvironment in preOpen()");
-      return;
-    } else {
-      HRegionInfo regionInfo = region.getRegionInfo();
-      if (isSpecialTable(regionInfo)) {
-        isSystemOrSuperUser(regionEnv.getConfiguration());
-      } else {
-        requirePermission("open", Action.ADMIN);
-      }
-    }
-  }
-
-  @Override
   public void postOpen(ObserverContext<RegionCoprocessorEnvironment> c) {
-    RegionCoprocessorEnvironment env = c.getEnvironment();
-    final HRegion region = env.getRegion();
+    RegionCoprocessorEnvironment e = c.getEnvironment();
+    final HRegion region = e.getRegion();
     if (region == null) {
       LOG.error("NULL region from RegionCoprocessorEnvironment in postOpen()");
       return;
     }
+
+    try {
+      this.authManager = TableAuthManager.get(
+          e.getRegionServerServices().getZooKeeper(),
+          regionEnv.getConfiguration());
+    } catch (IOException ioe) {
+      // pass along as a RuntimeException, so that the coprocessor is unloaded
+      throw new RuntimeException("Error obtaining TableAuthManager", ioe);
+    }
+
     if (AccessControlLists.isAclRegion(region)) {
       aclRegion = true;
       try {
-        initialize(env);
+        initialize(e);
       } catch (IOException ex) {
         // if we can't obtain permissions, it's better to fail
         // than perform checks incorrectly
@@ -769,32 +738,32 @@ public class AccessController extends Ba
 
   @Override
   public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
-    requirePermission("flush", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+    requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
   }
 
   @Override
   public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
-    requirePermission("split", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+    requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
   }
 
   @Override
   public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
       final Store store, final InternalScanner scanner) throws IOException {
-    requirePermission("compact", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+    requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
     return scanner;
   }
 
   @Override
   public void preCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> e,
       final Store store, final List<StoreFile> candidates) throws IOException {
-    requirePermission("compactSelection", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+    requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
   }
 
   @Override
   public void preGetClosestRowBefore(final ObserverContext<RegionCoprocessorEnvironment> c,
       final byte [] row, final byte [] family, final Result result)
       throws IOException {
-    requirePermission("getClosestRowBefore", TablePermission.Action.READ, c.getEnvironment(),
+    requirePermission(TablePermission.Action.READ, c.getEnvironment(),
         (family != null ? Lists.newArrayList(family) : null));
   }
 
@@ -807,7 +776,7 @@ public class AccessController extends Ba
       */
     RegionCoprocessorEnvironment e = c.getEnvironment();
     User requestUser = getActiveUser();
-    AuthResult authResult = permissionGranted("get", requestUser,
+    AuthResult authResult = permissionGranted(requestUser,
         TablePermission.Action.READ, e, get.getFamilyMap());
     if (!authResult.isAllowed()) {
       if (hasFamilyQualifierPermission(requestUser,
@@ -824,7 +793,7 @@ public class AccessController extends Ba
         } else {
           get.setFilter(filter);
         }
-        logResult(AuthResult.allow("get", "Access allowed with filter", requestUser,
+        logResult(AuthResult.allow("Access allowed with filter", requestUser,
             TablePermission.Action.READ, authResult.table));
       } else {
         logResult(authResult);
@@ -840,7 +809,7 @@ public class AccessController extends Ba
   @Override
   public boolean preExists(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Get get, final boolean exists) throws IOException {
-    requirePermission("exists", TablePermission.Action.READ, c.getEnvironment(),
+    requirePermission(TablePermission.Action.READ, c.getEnvironment(),
         get.familySet());
     return exists;
   }
@@ -849,7 +818,7 @@ public class AccessController extends Ba
   public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Put put, final WALEdit edit, final boolean writeToWAL)
       throws IOException {
-    requirePermission("put", TablePermission.Action.WRITE, c.getEnvironment(),
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
         put.getFamilyMap());
   }
 
@@ -865,7 +834,7 @@ public class AccessController extends Ba
   public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Delete delete, final WALEdit edit, final boolean writeToWAL)
       throws IOException {
-    requirePermission("delete", TablePermission.Action.WRITE, c.getEnvironment(),
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
         delete.getFamilyMap());
   }
 
@@ -885,8 +854,8 @@ public class AccessController extends Ba
       final WritableByteArrayComparable comparator, final Put put,
       final boolean result) throws IOException {
     Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
-    requirePermission("checkAndPut", TablePermission.Action.READ, c.getEnvironment(), familyMap);
-    requirePermission("checkAndPut", TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
+    requirePermission(TablePermission.Action.READ, c.getEnvironment(), familyMap);
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
     return result;
   }
 
@@ -897,8 +866,8 @@ public class AccessController extends Ba
       final WritableByteArrayComparable comparator, final Delete delete,
       final boolean result) throws IOException {
     Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
-    requirePermission("checkAndDelete", TablePermission.Action.READ, c.getEnvironment(), familyMap);
-    requirePermission("checkAndDelete", TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
+    requirePermission(TablePermission.Action.READ, c.getEnvironment(), familyMap);
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), familyMap);
     return result;
   }
 
@@ -907,7 +876,7 @@ public class AccessController extends Ba
       final byte [] row, final byte [] family, final byte [] qualifier,
       final long amount, final boolean writeToWAL)
       throws IOException {
-    requirePermission("incrementColumnValue", TablePermission.Action.WRITE, c.getEnvironment(),
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
         Arrays.asList(new byte[][]{family}));
     return -1;
   }
@@ -915,7 +884,7 @@ public class AccessController extends Ba
   @Override
   public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
       throws IOException {
-    requirePermission("append", TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
     return null;
   }
 
@@ -923,7 +892,7 @@ public class AccessController extends Ba
   public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Increment increment)
       throws IOException {
-    requirePermission("increment", TablePermission.Action.WRITE, c.getEnvironment(),
+    requirePermission(TablePermission.Action.WRITE, c.getEnvironment(),
         increment.getFamilyMap().keySet());
     return null;
   }
@@ -937,7 +906,7 @@ public class AccessController extends Ba
       */
     RegionCoprocessorEnvironment e = c.getEnvironment();
     User user = getActiveUser();
-    AuthResult authResult = permissionGranted("scannerOpen", user, TablePermission.Action.READ, e,
+    AuthResult authResult = permissionGranted(user, TablePermission.Action.READ, e,
         scan.getFamilyMap());
     if (!authResult.isAllowed()) {
       if (hasFamilyQualifierPermission(user, TablePermission.Action.READ, e,
@@ -954,7 +923,7 @@ public class AccessController extends Ba
         } else {
           scan.setFilter(filter);
         }
-        logResult(AuthResult.allow("scannerOpen", "Access allowed with filter", user,
+        logResult(AuthResult.allow("Access allowed with filter", user,
             TablePermission.Action.READ, authResult.table));
       } else {
         // no table/family level perms and no qualifier level perms, reject
@@ -1030,7 +999,7 @@ public class AccessController extends Ba
         LOG.debug("Received request to grant access permission " + perm.toString());
       }
 
-      requirePermission("grant", perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
+      requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
 
       AccessControlLists.addUserPermission(regionEnv.getConfiguration(), perm);
       if (AUDITLOG.isTraceEnabled()) {
@@ -1060,8 +1029,7 @@ public class AccessController extends Ba
         LOG.debug("Received request to revoke access permission " + perm.toString());
       }
 
-      requirePermission("revoke", perm.getTable(), perm.getFamily(),
-                        perm.getQualifier(), Action.ADMIN);
+      requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
 
       AccessControlLists.removeUserPermission(regionEnv.getConfiguration(), perm);
       if (AUDITLOG.isTraceEnabled()) {
@@ -1087,7 +1055,7 @@ public class AccessController extends Ba
   public List<UserPermission> getUserPermissions(final byte[] tableName) throws IOException {
     // only allowed to be called on _acl_ region
     if (aclRegion) {
-      requirePermission("userPermissions", tableName, null, null, Action.ADMIN);
+      requirePermission(tableName, null, null, Action.ADMIN);
 
       List<UserPermission> perms = AccessControlLists.getUserPermissions(
         regionEnv.getConfiguration(), tableName);
@@ -1121,12 +1089,12 @@ public class AccessController extends Ba
             }
           }
 
-          requirePermission("checkPermissions", action, regionEnv, familyMap);
+          requirePermission(action, regionEnv, familyMap);
         }
 
       } else {
         for (Permission.Action action : permission.getActions()) {
-          requirePermission("checkPermissions", action);
+          requirePermission(action);
         }
       }
     }
@@ -1159,56 +1127,4 @@ public class AccessController extends Ba
     }
     return tableName;
   }
-
-
-  @Override
-  public void preClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested)
-      throws IOException {
-    requirePermission("close", Permission.Action.ADMIN);
-  }
-
-  @Override
-  public void preLockRow(ObserverContext<RegionCoprocessorEnvironment> ctx, byte[] regionName,
-      byte[] row) throws IOException {
-    requirePermission("lockRow", getTableName(ctx.getEnvironment()), null, null,
-      Permission.Action.WRITE, Permission.Action.CREATE);
-  }
-
-  @Override
-  public void preUnlockRow(ObserverContext<RegionCoprocessorEnvironment> ctx, byte[] regionName,
-      long lockId) throws IOException {
-    requirePermission("unlockRow", getTableName(ctx.getEnvironment()), null, null,
-      Permission.Action.WRITE, Permission.Action.CREATE);
-  }
-
-  private void isSystemOrSuperUser(Configuration conf) throws IOException {
-    User user = User.getCurrent();
-    if (user == null) {
-      throw new IOException("Unable to obtain the current user, "
-          + "authorization checks for internal operations will not work correctly!");
-    }
-
-    String currentUser = user.getShortName();
-    List<String> superusers = Lists.asList(currentUser,
-      conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
-
-    User activeUser = getActiveUser();
-    if (!(superusers.contains(activeUser.getShortName()))) {
-      throw new AccessDeniedException("User '" + (user != null ? user.getShortName() : "null")
-          + "is not system or super user.");
-    }
-  }
-
-  private boolean isSpecialTable(HRegionInfo regionInfo) {
-    byte[] tableName = regionInfo.getTableName();
-    return tableName.equals(AccessControlLists.ACL_TABLE_NAME)
-      || tableName.equals(Bytes.toBytes("-ROOT-"))
-      || tableName.equals(Bytes.toBytes(".META."));
-  }
-
-  @Override
-  public void preStopRegionServer(ObserverContext<RegionServerCoprocessorEnvironment> env)
-      throws IOException {
-    requirePermission("stop", Permission.Action.ADMIN);
-  }
 }
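
A side note on the removed isSpecialTable() helper above: it compared byte[] table names with Object.equals(), which is reference equality on arrays, so comparisons against freshly built arrays such as Bytes.toBytes("-ROOT-") could not succeed; a content comparison normally goes through Bytes.equals. The sketch below is hypothetical (the helper is gone after this rollback) and only shows what a content-based check of the same three names would look like.

    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.security.access.AccessControlLists;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical helper, not part of the rolled-back source: decide whether a
    // region belongs to the ACL table or one of the catalog tables by comparing
    // table name bytes by content (Bytes.equals) rather than by reference.
    final class SpecialTableCheck {
      private SpecialTableCheck() {}

      static boolean isSpecialTable(HRegionInfo regionInfo) {
        byte[] tableName = regionInfo.getTableName();
        return Bytes.equals(tableName, AccessControlLists.ACL_TABLE_NAME)
            || Bytes.equals(tableName, Bytes.toBytes("-ROOT-"))
            || Bytes.equals(tableName, Bytes.toBytes(".META."));
      }
    }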

Modified: hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java (original)
+++ hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java Sun Dec 23 19:34:53 2012
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.security
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.ipc.SecureRpcEngine;
 import org.apache.hadoop.hbase.security.User;
 
@@ -33,9 +32,8 @@ public class SecureTestUtil {
     conf.set("hadoop.security.authorization", "false");
     conf.set("hadoop.security.authentication", "simple");
     conf.set("hbase.rpc.engine", SecureRpcEngine.class.getName());
-    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
-    conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName());
-    conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+    conf.set("hbase.coprocessor.master.classes", AccessController.class.getName());
+    conf.set("hbase.coprocessor.region.classes", AccessController.class.getName());
     // add the process running user to superusers
     String currentUser = User.getCurrent().getName();
     conf.set("hbase.superuser", "admin,"+currentUser);

Modified: hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/branches/0.94-test/security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Sun Dec 23 19:34:53 2012
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.HServerAd
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.UnknownRowLockException;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -52,11 +51,9 @@ import org.apache.hadoop.hbase.coprocess
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
-import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
@@ -82,8 +79,6 @@ public class TestAccessController {
   private static User USER_ADMIN;
   // user with rw permissions
   private static User USER_RW;
-  // user with rw permissions on table.
-  private static User USER_RW_ON_TABLE;
   // user with read-only permissions
   private static User USER_RO;
   // user is table owner. will have all permissions on table
@@ -98,7 +93,6 @@ public class TestAccessController {
 
   private static MasterCoprocessorEnvironment CP_ENV;
   private static RegionCoprocessorEnvironment RCP_ENV;
-  private static RegionServerCoprocessorEnvironment RSCP_ENV;
   private static AccessController ACCESS_CONTROLLER;
 
   @BeforeClass
@@ -113,10 +107,6 @@ public class TestAccessController {
     ACCESS_CONTROLLER = (AccessController) cpHost.findCoprocessor(AccessController.class.getName());
     CP_ENV = cpHost.createEnvironment(AccessController.class, ACCESS_CONTROLLER,
       Coprocessor.PRIORITY_HIGHEST, 1, conf);
-    RegionServerCoprocessorHost rsHost = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0)
-        .getCoprocessorHost();
-    RSCP_ENV = rsHost.createEnvironment(AccessController.class, ACCESS_CONTROLLER, 
-      Coprocessor.PRIORITY_HIGHEST, 1, conf);
 
     // Wait for the ACL table to become available
     TEST_UTIL.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 5000);
@@ -126,7 +116,6 @@ public class TestAccessController {
     USER_ADMIN = User.createUserForTesting(conf, "admin2", new String[0]);
     USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]);
     USER_RO = User.createUserForTesting(conf, "rouser", new String[0]);
-    USER_RW_ON_TABLE = User.createUserForTesting(conf, "rwuser_1", new String[0]);
     USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
     USER_CREATE = User.createUserForTesting(conf, "tbl_create", new String[0]);
     USER_NONE = User.createUserForTesting(conf, "nouser", new String[0]);
@@ -159,9 +148,6 @@ public class TestAccessController {
 
     protocol.grant(new UserPermission(Bytes.toBytes(USER_CREATE.getShortName()), TEST_TABLE, null,
         Permission.Action.CREATE));
-    
-    protocol.grant(new UserPermission(Bytes.toBytes(USER_RW_ON_TABLE.getShortName()), TEST_TABLE,
-      null, Permission.Action.READ, Permission.Action.WRITE));
   }
 
   @AfterClass
@@ -175,8 +161,6 @@ public class TestAccessController {
         user.runAs(action);
       } catch (AccessDeniedException ade) {
         fail("Expected action to pass for user '" + user.getShortName() + "' but was denied");
-      } catch (UnknownRowLockException exp){
-        //expected
       }
     }
   }
@@ -1287,70 +1271,4 @@ public class TestAccessController {
     }
 
   }
-
-  @Test
-  public void testLockAction() throws Exception {
-    PrivilegedExceptionAction lockAction = new PrivilegedExceptionAction() {
-      public Object run() throws Exception {
-        ACCESS_CONTROLLER.preLockRow(ObserverContext.createAndPrepare(RCP_ENV, null), null,
-          Bytes.toBytes("random_row"));
-        return null;
-      }
-    };
-    verifyAllowed(lockAction, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_RW_ON_TABLE);
-    verifyDenied(lockAction, USER_RO, USER_RW, USER_NONE);
-  }
-
-  @Test
-  public void testUnLockAction() throws Exception {
-    PrivilegedExceptionAction unLockAction = new PrivilegedExceptionAction() {
-      public Object run() throws Exception {
-        ACCESS_CONTROLLER.preUnlockRow(ObserverContext.createAndPrepare(RCP_ENV, null), null,
-          123456);
-        return null;
-      }
-    };
-    verifyAllowed(unLockAction, SUPERUSER, USER_ADMIN, USER_OWNER, USER_RW_ON_TABLE);
-    verifyDenied(unLockAction, USER_NONE, USER_RO, USER_RW);
-  }
-
-  @Test
-  public void testStopRegionServer() throws Exception {
-    PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
-      public Object run() throws Exception {
-        ACCESS_CONTROLLER.preStopRegionServer(ObserverContext.createAndPrepare(RSCP_ENV, null));
-        return null;
-      }
-    };
-
-    verifyAllowed(action, SUPERUSER, USER_ADMIN);
-    verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
-  }
-
-  @Test
-  public void testOpenRegion() throws Exception {
-    PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
-      public Object run() throws Exception {
-        ACCESS_CONTROLLER.preOpen(ObserverContext.createAndPrepare(RCP_ENV, null));
-        return null;
-      }
-    };
-
-    verifyAllowed(action, SUPERUSER, USER_ADMIN);
-    verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
-  }
-
-  @Test
-  public void testCloseRegion() throws Exception {
-    PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
-      public Object run() throws Exception {
-        ACCESS_CONTROLLER.preClose(ObserverContext.createAndPrepare(RCP_ENV, null), false);
-        return null;
-      }
-    };
-
-    verifyAllowed(action, SUPERUSER, USER_ADMIN);
-    verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
-  }
-
 }
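
The removed tests above all follow the same shape: call an AccessController pre-hook directly through ObserverContext.createAndPrepare(...) inside a PrivilegedExceptionAction, then run it as each user with verifyAllowed/verifyDenied. Only verifyAllowed is visible in this diff; the sketch below assumes a verifyDenied counterpart of the same shape (the real helper in this test class may also need to unwrap RPC-level exception wrappers).

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.hbase.security.AccessDeniedException;
    import org.apache.hadoop.hbase.security.User;

    import static org.junit.Assert.fail;

    // Hypothetical counterpart to the verifyAllowed helper shown earlier in this
    // diff: run the action as each user and expect an AccessDeniedException.
    public class VerifyDeniedSketch {
      public void verifyDenied(PrivilegedExceptionAction action, User... users) throws Exception {
        for (User user : users) {
          try {
            user.runAs(action);
            fail("Expected access to be denied for user '" + user.getShortName() + "'");
          } catch (AccessDeniedException expected) {
            // denied as required
          }
        }
      }
    }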

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HConstants.java Sun Dec 23 19:34:53 2012
@@ -157,9 +157,6 @@ public final class HConstants {
   /** Default value for ZooKeeper session timeout */
   public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;
 
-  /** Configuration key for whether to use ZK.multi */
-  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";
-
   /** Parameter name for port region server listens on. */
   public static final String REGIONSERVER_PORT = "hbase.regionserver.port";
 

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/HServerLoad.java Sun Dec 23 19:34:53 2012
@@ -60,14 +60,30 @@ implements WritableComparable<HServerLoa
   private int maxHeapMB = 0;
 
   // Regionserver-level coprocessors, e.g., WALObserver implementations.
-  private Set<String> coprocessors = new TreeSet<String>();
+  // Region-level coprocessors, on the other hand, are stored inside RegionLoad
+  // objects.
+  private Set<String> coprocessors =
+      new TreeSet<String>();
 
   /**
    * HBASE-4070: Improve region server metrics to report loaded coprocessors.
-   * @return the set of all the server-wide coprocessors on this regionserver
+   *
+   * @return Returns the set of all coprocessors on this
+   * regionserver, where this set is the union of the
+   * regionserver-level coprocessors on one hand, and all of the region-level
+   * coprocessors, on the other.
+   *
+   * We must iterate through all regions loaded on this regionserver to
+   * obtain all of the region-level coprocessors.
    */
-  public String[] getRsCoprocessors() {
-    return coprocessors.toArray(new String[0]);
+  public String[] getCoprocessors() {
+    TreeSet<String> returnValue = new TreeSet<String>(coprocessors);
+    for (Map.Entry<byte[], RegionLoad> rls: getRegionsLoad().entrySet()) {
+      for (String coprocessor: rls.getValue().getCoprocessors()) {
+        returnValue.add(coprocessor);
+      }
+    }
+    return returnValue.toArray(new String[0]);
   }
 
   /** per-region load metrics */
@@ -129,6 +145,10 @@ implements WritableComparable<HServerLoa
      */
     private int totalStaticBloomSizeKB;
 
+    // Region-level coprocessors.
+    Set<String> coprocessors =
+        new TreeSet<String>();
+
     /**
      * Constructor, for Writable
      */
@@ -148,6 +168,7 @@ implements WritableComparable<HServerLoa
      * @param writeRequestsCount
      * @param totalCompactingKVs
      * @param currentCompactedKVs
+     * @param coprocessors
      */
     public RegionLoad(final byte[] name, final int stores,
         final int storefiles, final int storeUncompressedSizeMB,
@@ -156,7 +177,8 @@ implements WritableComparable<HServerLoa
         final int rootIndexSizeKB, final int totalStaticIndexSizeKB,
         final int totalStaticBloomSizeKB,
         final long readRequestsCount, final long writeRequestsCount,
-        final long totalCompactingKVs, final long currentCompactedKVs) {
+        final long totalCompactingKVs, final long currentCompactedKVs,
+        final Set<String> coprocessors) {
       this.name = name;
       this.stores = stores;
       this.storefiles = storefiles;
@@ -171,6 +193,12 @@ implements WritableComparable<HServerLoa
       this.writeRequestsCount = writeRequestsCount;
       this.totalCompactingKVs = totalCompactingKVs;
       this.currentCompactedKVs = currentCompactedKVs;
+      this.coprocessors = coprocessors;
+    }
+
+    // Getters
+    private String[] getCoprocessors() {
+      return coprocessors.toArray(new String[0]);
     }
 
     /**
@@ -372,9 +400,9 @@ implements WritableComparable<HServerLoa
       this.totalCompactingKVs = in.readLong();
       this.currentCompactedKVs = in.readLong();
       int coprocessorsSize = in.readInt();
-      // Backward compatibility - there may be coprocessors in the region load, ignore them.
+      coprocessors = new TreeSet<String>();
       for (int i = 0; i < coprocessorsSize; i++) {
-        in.readUTF();
+        coprocessors.add(in.readUTF());
       }
     }
     
@@ -403,9 +431,9 @@ implements WritableComparable<HServerLoa
       this.totalCompactingKVs = WritableUtils.readVLong(in);
       this.currentCompactedKVs = WritableUtils.readVLong(in);
       int coprocessorsSize = WritableUtils.readVInt(in);
-      // Backward compatibility - there may be coprocessors in the region load, ignore them.
+      coprocessors = new TreeSet<String>();
       for (int i = 0; i < coprocessorsSize; i++) {
-        in.readUTF();
+        coprocessors.add(in.readUTF());
       }
     }
 
@@ -426,9 +454,10 @@ implements WritableComparable<HServerLoa
       WritableUtils.writeVInt(out, totalStaticBloomSizeKB);
       WritableUtils.writeVLong(out, totalCompactingKVs);
       WritableUtils.writeVLong(out, currentCompactedKVs);
-      // Backward compatibility - write out 0 as coprocessor count,
-      // we don't report region-level coprocessors anymore.
-      WritableUtils.writeVInt(out, 0);
+      WritableUtils.writeVInt(out, coprocessors.size());
+      for (String coprocessor: coprocessors) {
+        out.writeUTF(coprocessor);
+      }
     }
 
     /**
@@ -474,6 +503,11 @@ implements WritableComparable<HServerLoa
       }
       sb = Strings.appendKeyValue(sb, "compactionProgressPct",
           compactionProgressPct);
+      String coprocessors = Arrays.toString(getCoprocessors());
+      if (coprocessors != null) {
+        sb = Strings.appendKeyValue(sb, "coprocessors",
+            Arrays.toString(getCoprocessors()));
+      }
       return sb.toString();
     }
   }
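
With this rollback HServerLoad#getCoprocessors() again returns the union of the regionserver-level set and the coprocessors of every RegionLoad, instead of the server-wide-only getRsCoprocessors(). A minimal usage sketch, assuming the 0.94-era HBaseAdmin#getClusterStatus(), ClusterStatus#getServers() and ClusterStatus#getLoad(ServerName) accessors:

    import java.util.Arrays;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HServerLoad;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    // Sketch: print the coprocessors each region server reports. With the
    // rolled-back HServerLoad this includes region-level coprocessors collected
    // from every RegionLoad, not just the server-wide ones.
    public class ListCoprocessors {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
          ClusterStatus status = admin.getClusterStatus();
          for (ServerName server : status.getServers()) {
            HServerLoad load = status.getLoad(server);
            System.out.println(server + " -> " + Arrays.toString(load.getCoprocessors()));
          }
        } finally {
          admin.close();
        }
      }
    }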

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/KeyValue.java Sun Dec 23 19:34:53 2012
@@ -216,6 +216,9 @@ public class KeyValue implements Writabl
   private int offset = 0;
   private int length = 0;
 
+  // the row cached
+  private volatile byte [] rowCache = null;
+
   /**
    * @return True if a delete type, a {@link KeyValue.Type#Delete} or
    * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
@@ -984,6 +987,7 @@ public class KeyValue implements Writabl
       int tsOffset = getTimestampOffset();
       System.arraycopy(now, 0, this.bytes, tsOffset, Bytes.SIZEOF_LONG);
       // clear cache or else getTimestamp() possibly returns an old value
+      timestampCache = -1L;
       return true;
     }
     return false;
@@ -1033,19 +1037,28 @@ public class KeyValue implements Writabl
    * @return Row in a new byte array.
    */
   public byte [] getRow() {
-    int o = getRowOffset();
-    short l = getRowLength();
-    byte result[] = new byte[l];
-    System.arraycopy(getBuffer(), o, result, 0, l);
-    return result;
+    if (rowCache == null) {
+      int o = getRowOffset();
+      short l = getRowLength();
+      // initialize and copy the data into a local variable
+      // in case multiple threads race here.
+      byte local[] = new byte[l];
+      System.arraycopy(getBuffer(), o, local, 0, l);
+      rowCache = local; // volatile assign
+    }
+    return rowCache;
   }
 
   /**
    *
    * @return Timestamp
    */
+  private long timestampCache = -1;
   public long getTimestamp() {
-    return getTimestamp(getKeyLength());
+    if (timestampCache == -1) {
+      timestampCache = getTimestamp(getKeyLength());
+    }
+    return timestampCache;
   }
 
   /**
@@ -2247,17 +2260,21 @@ public class KeyValue implements Writabl
 
   // HeapSize
   public long heapSize() {
-    return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE
-        + ClassSize.align(ClassSize.ARRAY) + ClassSize.align(length)
-        + (3 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_LONG);
+    return ClassSize.align(ClassSize.OBJECT + (2 * ClassSize.REFERENCE) +
+        ClassSize.align(ClassSize.ARRAY) + ClassSize.align(length) +
+        (3 * Bytes.SIZEOF_INT) +
+        ClassSize.align(ClassSize.ARRAY) +
+        (2 * Bytes.SIZEOF_LONG));
   }
 
   // this overload assumes that the length bytes have already been read,
   // and it expects the length of the KeyValue to be explicitly passed
   // to it.
   public void readFields(int length, final DataInput in) throws IOException {
+    this.rowCache = null;
     this.length = length;
     this.offset = 0;
+    this.timestampCache = -1;
     this.keyLength = 0;
     this.bytes = new byte[this.length];
     in.readFully(this.bytes, 0, this.length);
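
The restored getRow() builds the row bytes in a local array and only then assigns the volatile rowCache, so a racing reader either sees null (and builds its own copy) or a fully populated array; the restored timestampCache, by contrast, is a plain long, so that cache is only safe when a KeyValue stays confined to one thread. A minimal standalone sketch of the copy-then-publish idiom, with hypothetical names:

    // Generic lazy-cache-on-a-volatile idiom, mirroring the restored
    // KeyValue.getRow(): build the value in a local variable, then publish it
    // with a single volatile write. Racing threads may each build a copy, but
    // none can observe a partially filled array.
    public class LazyBytesCache {
      private final byte[] source;
      private final int offset;
      private final int length;
      private volatile byte[] cache;   // null until first use

      public LazyBytesCache(byte[] source, int offset, int length) {
        this.source = source;
        this.offset = offset;
        this.length = length;
      }

      public byte[] get() {
        byte[] local = cache;
        if (local == null) {
          local = new byte[length];
          System.arraycopy(source, offset, local, 0, length);
          cache = local;               // volatile publish after the copy completes
        }
        return local;
      }
    }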

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java Sun Dec 23 19:34:53 2012
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase;
 
 /**
  * Thrown if a region server is passed an unknown row lock id
- * @deprecated row locks are deprecated (and thus so our associated exceptions).
  */
 public class UnknownRowLockException extends DoNotRetryIOException {
   private static final long serialVersionUID = 993179627856392526L;

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java Sun Dec 23 19:34:53 2012
@@ -147,38 +147,6 @@ public class HFileArchiver {
   }
 
   /**
-   * Remove from the specified region the store files of the specified column family,
-   * either by archiving them or outright deletion
-   * @param fs the filesystem where the store files live
-   * @param conf {@link Configuration} to examine to determine the archive directory
-   * @param parent Parent region hosting the store files
-   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)
-   * @param family the family hosting the store files
-   * @throws IOException if the files could not be correctly disposed.
-   */
-  public static void archiveFamily(FileSystem fs, Configuration conf,
-      HRegionInfo parent, Path tableDir, byte[] family) throws IOException {
-    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));
-    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir, null);
-    if (storeFiles == null) {
-      LOG.debug("No store files to dispose for region=" + parent.getRegionNameAsString() +
-          ", family=" + Bytes.toString(family));
-      return;
-    }
-
-    FileStatusConverter getAsFile = new FileStatusConverter(fs);
-    Collection<File> toArchive = Lists.transform(Arrays.asList(storeFiles), getAsFile);
-    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, tableDir, family);
-
-    // do the actual archive
-    if (!resolveAndArchive(fs, storeArchiveDir, toArchive)) {
-      throw new IOException("Failed to archive/delete all the files for region:"
-          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)
-          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");
-    }
-  }
-
-  /**
    * Remove the store files, either by archiving them or outright deletion
    * @param fs the filesystem where the store files live
    * @param parent Parent region hosting the store files
@@ -228,7 +196,7 @@ public class HFileArchiver {
     if (!resolveAndArchive(fs, storeArchiveDir, storeFiles)) {
       throw new IOException("Failed to archive/delete all the files for region:"
           + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)
-          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");
+          + " into " + storeArchiveDir + "Something is probably arwy on the filesystem.");
     }
   }
 

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java Sun Dec 23 19:34:53 2012
@@ -432,6 +432,7 @@ public class MetaReader {
           return true;
         }
         if (!isInsideTable(this.current, tableNameBytes)) return false;
+        if (this.current.isSplitParent()) return true;
         // Else call super and add this Result to the collection.
         super.visit(r);
         // Stop collecting regions from table after we get one.

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Delete.java Sun Dec 23 19:34:53 2012
@@ -86,25 +86,6 @@ public class Delete extends Mutation
   }
 
   /**
-   * Create a Delete operation for the specified row and timestamp.<p>
-   *
-   * If no further operations are done, this will delete all columns in all
-   * families of the specified row with a timestamp less than or equal to the
-   * specified timestamp.<p>
-   *
-   * This timestamp is ONLY used for a delete row operation.  If specifying
-   * families or columns, you must specify each timestamp individually.
-   * @param row row key
-   * @param timestamp maximum version timestamp (only for delete row)
-   * @param rowLock previously acquired row lock, or null
-   * @deprecated {@link RowLock} is deprecated, use #de
-   */
-  public Delete(byte [] row, long timestamp) {
-    this.row = row;
-    this.ts = timestamp;
-  }
-
-  /**
    * Create a Delete operation for the specified row and timestamp, using
    * an optional row lock.<p>
    *
@@ -117,7 +98,6 @@ public class Delete extends Mutation
    * @param row row key
    * @param timestamp maximum version timestamp (only for delete row)
    * @param rowLock previously acquired row lock, or null
-   * @deprecated {@link RowLock} is deprecated, use {@link #Delete(byte[], long)}.
    */
   public Delete(byte [] row, long timestamp, RowLock rowLock) {
     this.row = row;
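
With the Delete(byte[] row, long timestamp) constructor removed by this rollback, a whole-row delete bounded by a timestamp on this branch goes through the RowLock overload kept above; its javadoc allows a null lock. A minimal usage sketch (row key and timestamp are illustrative):

    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch: whole-row delete up to a given timestamp on the rolled-back
    // branch, passing null for the RowLock since no lock is held.
    public class TimestampedDelete {
      public static Delete upTo(byte[] row, long timestamp) {
        return new Delete(row, timestamp, null);
      }

      public static void main(String[] args) {
        Delete d = upTo(Bytes.toBytes("row-1"), 1356287693000L);
        System.out.println(d);
      }
    }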

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/Get.java Sun Dec 23 19:34:53 2012
@@ -20,7 +20,6 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -98,7 +97,6 @@ public class Get extends OperationWithAt
    * all columns in all families of the specified row.
    * @param row row key
    * @param rowLock previously acquired row lock, or null
-   * @deprecated {@link RowLock} is deprecated, use {@link #Get(byte[])}.
    */
   public Get(byte [] row, RowLock rowLock) {
     this.row = row;
@@ -133,9 +131,6 @@ public class Get extends OperationWithAt
     if(set == null) {
       set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
     }
-    if (qualifier == null) {
-      qualifier = HConstants.EMPTY_BYTE_ARRAY;
-    }
     set.add(qualifier);
     familyMap.put(family, set);
     return this;
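
Note that the rollback also drops the null-qualifier normalization in addColumn(), so the qualifier set (a TreeSet ordered by Bytes.BYTES_COMPARATOR, as shown above) again receives exactly what the caller passes, and a null qualifier is likely to fail with a NullPointerException when the comparator runs. Callers on this branch are safer naming the empty qualifier explicitly or selecting the whole family; a minimal sketch:

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch: two null-free ways to build a Get on the rolled-back branch.
    public class GetExamples {
      public static Get emptyQualifier(byte[] row, byte[] family) {
        Get get = new Get(row);
        get.addColumn(family, HConstants.EMPTY_BYTE_ARRAY);  // explicit empty qualifier
        return get;
      }

      public static Get wholeFamily(byte[] row, byte[] family) {
        Get get = new Get(row);
        get.addFamily(family);  // all columns in the family
        return get;
      }

      public static void main(String[] args) {
        System.out.println(emptyQualifier(Bytes.toBytes("r"), Bytes.toBytes("cf")));
        System.out.println(wholeFamily(Bytes.toBytes("r"), Bytes.toBytes("cf")));
      }
    }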

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java Sun Dec 23 19:34:53 2012
@@ -494,8 +494,8 @@ public class HConnectionManager {
     private final Object masterLock = new Object();
     private volatile boolean closed;
     private volatile boolean aborted;
-    private volatile boolean resetting;
     private volatile HMasterInterface master;
+    private volatile boolean masterChecked;
     // ZooKeeper reference
     private volatile ZooKeeperWatcher zooKeeper;
     // ZooKeeper-based master address tracker
@@ -506,8 +506,6 @@ public class HConnectionManager {
     private final Object metaRegionLock = new Object();
 
     private final Object userRegionLock = new Object();
-	
-    private final Object resetLock = new Object();
 
     private final Configuration conf;
     // Known region HServerAddress.toString() -> HRegionInterface
@@ -576,7 +574,7 @@ public class HConnectionManager {
           HConstants.DEFAULT_HBASE_CLIENT_PREFETCH_LIMIT);
 
       this.master = null;
-      this.resetting = false;
+      this.masterChecked = false;
     }
 
     private synchronized void ensureZookeeperTrackers()
@@ -664,7 +662,9 @@ public class HConnectionManager {
         this.master = null;
 
         for (int tries = 0;
-          !this.closed && this.master == null && tries < numRetries;
+          !this.closed &&
+          !this.masterChecked && this.master == null &&
+          tries < numRetries;
         tries++) {
 
           try {
@@ -703,6 +703,10 @@ public class HConnectionManager {
             throw new RuntimeException("Thread was interrupted while trying to connect to master.");
           }
         }
+        // Avoid re-checking in the future if this is a managed HConnection,
+        // even if we failed to acquire a master.
+        // (this is to retain the existing behavior before HBASE-5058)
+        this.masterChecked = managed;
 
         if (this.master == null) {
           if (sn == null) {
@@ -1682,12 +1686,7 @@ public class HConnectionManager {
           LOG.info("ZK session expired. This disconnect could have been" +
               " caused by a network partition or a long-running GC pause," +
               " either way it's recommended that you verify your environment.");
-          synchronized (resetLock) {
-            if (resetting) return;
-            this.resetting = true;
-          }
           resetZooKeeperTrackers();
-          this.resetting = false;
         }
         return;
       }
@@ -1757,6 +1756,7 @@ public class HConnectionManager {
           HBaseRPC.stopProxy(master);
         }
         master = null;
+        masterChecked = false;
       }
       if (stopProxy) {
         for (HRegionInterface i : servers.values()) {
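
The restored master lookup loop earlier in this file's diff retries while the connection is open, no check has been recorded, the master reference is still null and the retry budget is not exhausted, and it then sets masterChecked only for managed connections (the HBASE-5058 note in the comment). A self-contained sketch of that bounded-retry-and-remember shape, with hypothetical names:

    // Generic sketch of the restored lookup loop: retry a bounded number of
    // times, stop as soon as a value is obtained, and afterwards record the
    // attempt only for "managed" callers so they do not re-check, even when
    // the lookup failed (mirroring the masterChecked flag).
    public class CheckedLookup<T> {
      public interface Source<V> {
        V tryOnce();   // hypothetical supplier; returns null on failure
      }

      private volatile T value;
      private volatile boolean checked;

      public T lookup(Source<T> source, int numRetries, boolean managed) {
        for (int tries = 0; !checked && value == null && tries < numRetries; tries++) {
          value = source.tryOnce();
        }
        checked = managed;   // remember the attempt only for managed callers
        return value;
      }
    }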

Modified: hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java?rev=1425513&r1=1425512&r2=1425513&view=diff
==============================================================================
--- hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java (original)
+++ hbase/branches/0.94-test/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java Sun Dec 23 19:34:53 2012
@@ -380,7 +380,6 @@ public interface HTableInterface extends
    * @throws IOException if a remote or network exception occurs.
    * @see RowLock
    * @see #unlockRow
-   * @deprecated {@link RowLock} and associated operations are deprecated
    */
   RowLock lockRow(byte[] row) throws IOException;
 
@@ -391,7 +390,6 @@ public interface HTableInterface extends
    * @throws IOException if a remote or network exception occurs.
    * @see RowLock
    * @see #unlockRow
-   * @deprecated {@link RowLock} and associated operations are deprecated
    */
   void unlockRow(RowLock rl) throws IOException;