Posted to commits@hbase.apache.org by ec...@apache.org on 2012/12/22 00:39:31 UTC

svn commit: r1425198 [2/2] - in /hbase/trunk/hbase-server/src: main/java/org/apache/hadoop/hbase/ main/java/org/apache/hadoop/hbase/client/ main/java/org/apache/hadoop/hbase/filter/ main/java/org/apache/hadoop/hbase/io/ main/java/org/apache/hadoop/hbas...

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java Fri Dec 21 23:39:30 2012
@@ -29,8 +29,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
 import org.apache.hadoop.hbase.thrift.generated.TCell;
@@ -54,7 +53,7 @@ public class ThriftUtilities {
       throws IllegalArgument {
     Compression.Algorithm comp =
       Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
-    StoreFile.BloomType bt =
+    BloomType bt =
       BloomType.valueOf(in.bloomFilterType);
 
     if (in.name == null || !in.name.hasRemaining()) {
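
The hunk above is representative of the BloomType relocation that runs through this
commit: the enum moves from the nested StoreFile.BloomType to the top-level
org.apache.hadoop.hbase.regionserver.BloomType. A minimal sketch of a call site after
the move, using only methods visible in these diffs (the "cf" family name is a
hypothetical placeholder):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.regionserver.BloomType;

    public class BloomTypeAfterRefactor {
      public static void main(String[] args) {
        // Call sites simply drop the StoreFile. prefix; the enum values are unchanged.
        HColumnDescriptor hcd = new HColumnDescriptor("cf"); // hypothetical family name
        hcd.setBloomFilterType(BloomType.ROW);
        // Parsing from a string, as ThriftUtilities does above:
        BloomType bt = BloomType.valueOf("ROWCOL");
        System.out.println(hcd.getBloomFilterType() + " / " + bt);
      }
    }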

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java Fri Dec 21 23:39:30 2012
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 
 /**
  * Handles Bloom filter initialization based on configuration and serialized

Modified: hbase/trunk/hbase-server/src/main/resources/hbase-default.xml
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/resources/hbase-default.xml?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/resources/hbase-default.xml (original)
+++ hbase/trunk/hbase-server/src/main/resources/hbase-default.xml Fri Dec 21 23:39:30 2012
@@ -521,11 +521,19 @@
           block is finished.
       </description>
   </property>
+
+  <property>
+    <name>hbase.rpc.client.engine</name>
+    <value>org.apache.hadoop.hbase.ipc.ProtobufRpcClientEngine</value>
+    <description>Implementation of org.apache.hadoop.hbase.ipc.RpcClientEngine to be
+    used for client RPC call marshalling.
+    </description>
+  </property>
   <property>
-    <name>hbase.rpc.engine</name>
-    <value>org.apache.hadoop.hbase.ipc.ProtobufRpcEngine</value>
-    <description>Implementation of org.apache.hadoop.hbase.ipc.RpcEngine to be
-    used for client / server RPC call marshalling.
+    <name>hbase.rpc.server.engine</name>
+    <value>org.apache.hadoop.hbase.ipc.ProtobufRpcServerEngine</value>
+    <description>Implementation of org.apache.hadoop.hbase.ipc.RpcServerEngine to be
+    used for server RPC call marshalling.
     </description>
   </property>
   <property>
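
The property split above means client and server marshalling engines are now configured
independently instead of through the single hbase.rpc.engine key. A hedged sketch of
overriding both keys programmatically; only the property names and default values come
from this diff, overriding them in code is purely illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class RpcEngineConfig {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // These two keys replace the old single hbase.rpc.engine key.
        conf.set("hbase.rpc.client.engine",
            "org.apache.hadoop.hbase.ipc.ProtobufRpcClientEngine");
        conf.set("hbase.rpc.server.engine",
            "org.apache.hadoop.hbase.ipc.ProtobufRpcServerEngine");
      }
    }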

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Fri Dec 21 23:39:30 2012
@@ -70,12 +70,12 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -188,7 +188,7 @@ public class HBaseTestingUtility extends
     List<Object[]> configurations = new ArrayList<Object[]>();
     for (Compression.Algorithm comprAlgo :
          HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-      for (StoreFile.BloomType bloomType : StoreFile.BloomType.values()) {
+      for (BloomType bloomType : BloomType.values()) {
         configurations.add(new Object[] { comprAlgo, bloomType });
       }
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java Fri Dec 21 23:39:30 2012
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTru
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.junit.experimental.categories.Category;
 
@@ -48,7 +49,7 @@ public class TestHColumnDescriptor {
     boolean inmemory = hcd.isInMemory();
     hcd.setScope(v);
     hcd.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
-    hcd.setBloomFilterType(StoreFile.BloomType.ROW);
+    hcd.setBloomFilterType(BloomType.ROW);
     hcd.setCompressionType(Algorithm.SNAPPY);
 
 
@@ -65,7 +66,7 @@ public class TestHColumnDescriptor {
     assertEquals(hcd.getScope(), deserializedHcd.getScope());
     assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
     assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
-    assertTrue(deserializedHcd.getBloomFilterType().equals(StoreFile.BloomType.ROW));
+    assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
   }
 
   @Test

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java Fri Dec 21 23:39:30 2012
@@ -479,8 +479,6 @@ public class TestHbaseObjectWritable ext
     assertEquals(18,HbaseObjectWritable.getClassCode(HColumnDescriptor.class).intValue());
     assertEquals(19,HbaseObjectWritable.getClassCode(HConstants.Modify.class).intValue());
     // 20 and 21 are place holders for HMsg
-    assertEquals(22,HbaseObjectWritable.getClassCode(HRegion.class).intValue());
-    assertEquals(23,HbaseObjectWritable.getClassCode(HRegion[].class).intValue());
     assertEquals(24,HbaseObjectWritable.getClassCode(HRegionInfo.class).intValue());
     assertEquals(25,HbaseObjectWritable.getClassCode(HRegionInfo[].class).intValue());
     // Intentional hole... these objects have been removed.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java Fri Dec 21 23:39:30 2012
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -88,7 +88,7 @@ public class TestCacheOnWrite {
   private static final int NUM_KV = 25000;
   private static final int INDEX_BLOCK_SIZE = 512;
   private static final int BLOOM_BLOCK_SIZE = 4096;
-  private static final BloomType BLOOM_TYPE = StoreFile.BloomType.ROWCOL;
+  private static final BloomType BLOOM_TYPE = BloomType.ROWCOL;
   private static final ChecksumType CKTYPE = ChecksumType.CRC32;
   private static final int CKBYTES = 512;
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java Fri Dec 21 23:39:30 2012
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -43,7 +43,7 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
-/**W
+/**
  * Make sure we always cache important block types, such as index blocks, as
  * long as we have a block cache, even though block caching might be disabled
  * for the column family.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java Fri Dec 21 23:39:30 2012
@@ -41,7 +41,7 @@ import com.google.protobuf.ServiceExcept
  * Make sure to call setProtocolEngine to have the client actually use the RpcEngine
  * for a specific protocol
  */
-public class RandomTimeoutRpcEngine extends ProtobufRpcEngine {
+public class RandomTimeoutRpcEngine extends ProtobufRpcClientEngine {
 
   private static final Random RANDOM = new Random(System.currentTimeMillis());
   public static double chanceOfTimeout = 0.3;
@@ -67,7 +67,7 @@ public class RandomTimeoutRpcEngine exte
    * Call this in order to set this class to run as the RpcEngine for the given protocol
    */
   public static void setProtocolEngine(Configuration conf, Class protocol) {
-    HBaseRPC.setProtocolEngine(conf, protocol, RandomTimeoutRpcEngine.class);
+    HBaseClientRPC.setProtocolEngine(conf, protocol, RandomTimeoutRpcEngine.class);
   }
 
   /**
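
Per the class comment above, callers must still invoke setProtocolEngine for the client
to actually use this engine for a given protocol; only the facade it delegates to
changed (HBaseRPC to HBaseClientRPC). A minimal sketch, assuming a hypothetical
protocol interface since the real test protocols are not shown in full here:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.ipc.RandomTimeoutRpcEngine;

    public class TimeoutEngineWiring {
      interface MyProtocol { } // hypothetical stand-in for a real protocol

      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Routes client-side calls on MyProtocol through the random-timeout engine.
        RandomTimeoutRpcEngine.setProtocolEngine(conf, MyProtocol.class);
      }
    }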

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java Fri Dec 21 23:39:30 2012
@@ -70,12 +70,12 @@ public class TestDelayedRpc {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
 
-    rpcServer = HBaseRPC.getServer(new TestRpcImpl(delayReturnValue),
+    rpcServer = HBaseServerRPC.getServer(new TestRpcImpl(delayReturnValue),
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
 
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
+    TestRpc client = (TestRpc) HBaseClientRPC.getProxy(TestRpc.class, 0,
         rpcServer.getListenerAddress(), conf, 1000);
 
     List<Integer> results = new ArrayList<Integer>();
@@ -133,11 +133,11 @@ public class TestDelayedRpc {
     log.setLevel(Level.WARN);
 
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-    rpcServer = HBaseRPC.getServer(new TestRpcImpl(true),
+    rpcServer = HBaseServerRPC.getServer(new TestRpcImpl(true),
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
+    TestRpc client = (TestRpc) HBaseClientRPC.getProxy(TestRpc.class, 0,
         rpcServer.getListenerAddress(), conf, 1000);
 
     Thread threads[] = new Thread[MAX_DELAYED_RPC + 1];
@@ -264,12 +264,12 @@ public class TestDelayedRpc {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
 
-    rpcServer = HBaseRPC.getServer(new FaultyTestRpc(),
+    rpcServer = HBaseServerRPC.getServer(new FaultyTestRpc(),
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
 
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
+    TestRpc client = (TestRpc) HBaseClientRPC.getProxy(TestRpc.class, 0,
         rpcServer.getListenerAddress(), conf, 1000);
 
     int result = 0xDEADBEEF;
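
Taken together, the hunks in this file show the full shape of the RPC facade split:
servers now come from HBaseServerRPC, proxies from HBaseClientRPC. A hedged end-to-end
sketch with the argument lists copied from the test code as-is; MyService and its impl
are hypothetical, and a real protocol needs the same interface scaffolding TestRpcImpl
has:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
    import org.apache.hadoop.hbase.ipc.HBaseServerRPC;
    import org.apache.hadoop.hbase.ipc.RpcServer;

    public class SplitRpcBootstrap {
      interface MyService { } // hypothetical protocol interface
      static class MyServiceImpl implements MyService { }

      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        InetSocketAddress isa = new InetSocketAddress("localhost", 0);
        // Server half: was HBaseRPC.getServer before this commit.
        RpcServer rpcServer = HBaseServerRPC.getServer(new MyServiceImpl(),
            new Class<?>[]{ MyServiceImpl.class },
            isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
        rpcServer.start();
        // Client half: was HBaseRPC.getProxy before this commit.
        MyService client = (MyService) HBaseClientRPC.getProxy(MyService.class, 0,
            rpcServer.getListenerAddress(), conf, 1000);
      }
    }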

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java Fri Dec 21 23:39:30 2012
@@ -96,12 +96,13 @@ public class TestProtoBufRpc {
   public  void setUp() throws IOException { // Setup server for both protocols
     conf = new Configuration();
     // Set RPC engine to protobuf RPC engine
-    HBaseRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class);
+    HBaseClientRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcClientEngine.class);
+    HBaseServerRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcServerEngine.class);
 
     // Create server side implementation
     PBServerImpl serverImpl = new PBServerImpl();
     // Get RPC server for server side implementation
-    server = HBaseRPC.getServer(TestRpcService.class,serverImpl, 
+    server = HBaseServerRPC.getServer(TestRpcService.class,serverImpl,
         new Class[]{TestRpcService.class}, 
         ADDRESS, PORT, 10, 10, true, conf, 0);
     addr = server.getListenerAddress();
@@ -116,9 +117,10 @@ public class TestProtoBufRpc {
 
   private static TestRpcService getClient() throws IOException {
     // Set RPC engine to protobuf RPC engine
-    HBaseRPC.setProtocolEngine(conf, TestRpcService.class,
-        ProtobufRpcEngine.class);
-    return (TestRpcService) HBaseRPC.getProxy(TestRpcService.class, 0, 
+    HBaseClientRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcClientEngine.class);
+    HBaseServerRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcServerEngine.class);
+
+    return (TestRpcService) HBaseClientRPC.getProxy(TestRpcService.class, 0,
         addr, conf, 10000);
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Fri Dec 21 23:39:30 2012
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.*;
 import org.junit.experimental.categories.Category;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java Fri Dec 21 23:39:30 2012
@@ -27,7 +27,7 @@ import java.net.SocketTimeoutException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.ipc.HBaseRPC;
+import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
 import org.apache.hadoop.hbase.MasterMonitorProtocol;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;
@@ -55,8 +55,8 @@ public class TestHMasterRPCException {
     //try to connect too soon. Retry on SocketTimeoutException.
     while (i < 20) { 
       try {
-        MasterMonitorProtocol inf = (MasterMonitorProtocol) HBaseRPC.getProxy(
-            MasterMonitorProtocol.class,  MasterMonitorProtocol.VERSION, isa, conf, 100 * 10);
+        MasterMonitorProtocol inf = (MasterMonitorProtocol) HBaseClientRPC.getProxy(
+            MasterMonitorProtocol.class, MasterMonitorProtocol.VERSION, isa, conf, 100 * 10);
         inf.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
         fail();
       } catch (ServiceException ex) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java Fri Dec 21 23:39:30 2012
@@ -101,7 +101,7 @@ public class CreateRandomStoreFile {
             + Arrays.toString(Compression.Algorithm.values()));
     options.addOption(BLOOM_FILTER_OPTION, "bloom_filter", true,
         "Bloom filter type, one of "
-            + Arrays.toString(StoreFile.BloomType.values()));
+            + Arrays.toString(BloomType.values()));
     options.addOption(BLOCK_SIZE_OPTION, "block_size", true,
         "HFile block size");
     options.addOption(BLOOM_BLOCK_SIZE_OPTION, "bloom_block_size", true,
@@ -162,9 +162,9 @@ public class CreateRandomStoreFile {
           cmdLine.getOptionValue(COMPRESSION_OPTION));
     }
 
-    StoreFile.BloomType bloomType = StoreFile.BloomType.NONE;
+    BloomType bloomType = BloomType.NONE;
     if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
-      bloomType = StoreFile.BloomType.valueOf(cmdLine.getOptionValue(
+      bloomType = BloomType.valueOf(cmdLine.getOptionValue(
           BLOOM_FILTER_OPTION));
     }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java Fri Dec 21 23:39:30 2012
@@ -367,8 +367,6 @@ public class DataBlockEncodingTool {
   /**
    * Check decompress performance of a given algorithm and print it.
    * @param algorithm Compression algorithm.
-   * @param compressorCodec Compressor to be tested.
-   * @param decompressorCodec Decompressor of the same algorithm.
    * @param name Name of algorithm.
    * @param buffer Buffer to be compressed.
    * @param offset Position of the beginning of the data.
@@ -584,7 +582,7 @@ public class DataBlockEncodingTool {
     CacheConfig cacheConf = new CacheConfig(conf);
     FileSystem fs = FileSystem.get(conf);
     StoreFile hsf = new StoreFile(fs, path, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
 
     StoreFile.Reader reader = hsf.createReader();
     reader.loadFileInfo();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java Fri Dec 21 23:39:30 2012
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 
 /**
  * Test seek performance for encoded data blocks. Read an HFile and do several

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java Fri Dec 21 23:39:30 2012
@@ -146,7 +146,7 @@ public class HFileReadWriteTest {
   private HFileDataBlockEncoder dataBlockEncoder =
       NoOpDataBlockEncoder.INSTANCE;
 
-  private StoreFile.BloomType bloomType = StoreFile.BloomType.NONE;
+  private BloomType bloomType = BloomType.NONE;
   private int blockSize;
   private Compression.Algorithm compression = Compression.Algorithm.NONE;
 
@@ -178,7 +178,7 @@ public class HFileReadWriteTest {
         + Arrays.toString(Compression.Algorithm.values()) +
         Workload.MERGE.onlyUsedFor());
     options.addOption(BLOOM_FILTER_OPTION, true, "Bloom filter type, one of "
-        + Arrays.toString(StoreFile.BloomType.values()) +
+        + Arrays.toString(BloomType.values()) +
         Workload.MERGE.onlyUsedFor());
     options.addOption(BLOCK_SIZE_OPTION, true, "HFile block size" +
         Workload.MERGE.onlyUsedFor());
@@ -239,7 +239,7 @@ public class HFileReadWriteTest {
     }
 
     if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
-      bloomType = StoreFile.BloomType.valueOf(cmdLine.getOptionValue(
+      bloomType = BloomType.valueOf(cmdLine.getOptionValue(
           BLOOM_FILTER_OPTION));
     }
 
@@ -468,7 +468,7 @@ public class HFileReadWriteTest {
     // We are passing the ROWCOL Bloom filter type, but StoreFile will still
     // use the Bloom filter type specified in the HFile.
     return new StoreFile(fs, filePath, conf, cacheConf,
-        StoreFile.BloomType.ROWCOL, dataBlockEncoder);
+        BloomType.ROWCOL, dataBlockEncoder);
   }
 
   public static int charToHex(int c) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java Fri Dec 21 23:39:30 2012
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.client.Sc
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.junit.Test;
@@ -89,7 +88,7 @@ public class TestBlocksRead extends HBas
    * @param tableName
    * @param callingMethod
    * @param conf
-   * @param families
+   * @param family
    * @throws IOException
    * @return created and initialized region.
    */

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java Fri Dec 21 23:39:30 2012
@@ -34,7 +34,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
@@ -49,7 +48,6 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.HFileReaderV2;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
 import org.apache.hadoop.hbase.util.Bytes;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java Fri Dec 21 23:39:30 2012
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.ByteBloomFilter;
 import org.apache.hadoop.hbase.util.Bytes;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java Fri Dec 21 23:39:30 2012
@@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.fs.HFileS
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assume;
 import org.junit.Test;
@@ -81,7 +80,7 @@ public class TestFSErrorsExposed {
         writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
 
     StoreFile sf = new StoreFile(fs, writer.getPath(),
-        util.getConfiguration(), cacheConf, StoreFile.BloomType.NONE,
+        util.getConfiguration(), cacheConf, BloomType.NONE,
         NoOpDataBlockEncoder.INSTANCE);
 
     StoreFile.Reader reader = sf.createReader();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Fri Dec 21 23:39:30 2012
@@ -75,7 +75,6 @@ import org.apache.hadoop.hbase.monitorin
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java Fri Dec 21 23:39:30 2012
@@ -103,31 +103,6 @@ public class TestHRegionInfo {
                  + id + "." + md5HashInHex + ".",
                  nameStr);
   }
-
-  @Test
-  public void testGetSetOfHTD() throws IOException {
-    HBaseTestingUtility HTU = new HBaseTestingUtility();
-    final String tablename = "testGetSetOfHTD";
-
-    // Delete the temporary table directory that might still be there from the
-    // previous test run.
-    FSTableDescriptors.deleteTableDescriptorIfExists(tablename,
-        HTU.getConfiguration());
-
-    HTableDescriptor htd = new HTableDescriptor(tablename);
-    FSTableDescriptors.createTableDescriptor(htd, HTU.getConfiguration());
-    HRegionInfo hri = new HRegionInfo(Bytes.toBytes("testGetSetOfHTD"),
-        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
-    HTableDescriptor htd2 = hri.getTableDesc();
-    assertTrue(htd.equals(htd2));
-    final String key = "SOME_KEY";
-    assertNull(htd.getValue(key));
-    final String value = "VALUE";
-    htd.setValue(key, value);
-    hri.setTableDesc(htd);
-    HTableDescriptor htd3 = hri.getTableDesc();
-    assertTrue(htd.equals(htd3));
-  }
   
   @Test
   public void testContainsRange() {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java Fri Dec 21 23:39:30 2012
@@ -107,7 +107,7 @@ public class TestMultiColumnScanner {
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 
   private final Compression.Algorithm comprAlgo;
-  private final StoreFile.BloomType bloomType;
+  private final BloomType bloomType;
   private final DataBlockEncoding dataBlockEncoding;
 
   // Some static sanity-checking.
@@ -133,7 +133,7 @@ public class TestMultiColumnScanner {
   }
 
   public TestMultiColumnScanner(Compression.Algorithm comprAlgo,
-      StoreFile.BloomType bloomType, boolean useDataBlockEncoding) {
+      BloomType bloomType, boolean useDataBlockEncoding) {
     this.comprAlgo = comprAlgo;
     this.bloomType = bloomType;
     this.dataBlockEncoding = useDataBlockEncoding ? DataBlockEncoding.PREFIX :

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java Fri Dec 21 23:39:30 2012
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.SmallTest
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -74,7 +72,7 @@ public class TestScanWithBloomError {
   private static final byte[] ROW_BYTES = Bytes.toBytes(ROW);
   private static NavigableSet<Integer> allColIds = new TreeSet<Integer>();
   private HRegion region;
-  private StoreFile.BloomType bloomType;
+  private BloomType bloomType;
   private FileSystem fs;
   private Configuration conf;
 
@@ -84,13 +82,13 @@ public class TestScanWithBloomError {
   @Parameters
   public static final Collection<Object[]> parameters() {
     List<Object[]> configurations = new ArrayList<Object[]>();
-    for (StoreFile.BloomType bloomType : StoreFile.BloomType.values()) {
+    for (BloomType bloomType : BloomType.values()) {
       configurations.add(new Object[] { bloomType });
     }
     return configurations;
   }
 
-  public TestScanWithBloomError(StoreFile.BloomType bloomType) {
+  public TestScanWithBloomError(BloomType bloomType) {
     this.bloomType = bloomType;
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java Fri Dec 21 23:39:30 2012
@@ -44,8 +44,6 @@ import org.apache.hadoop.hbase.client.De
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.Before;
@@ -115,7 +113,7 @@ public class TestSeekOptimizations {
   private List<KeyValue> expectedKVs = new ArrayList<KeyValue>();
 
   private Compression.Algorithm comprAlgo;
-  private StoreFile.BloomType bloomType;
+  private BloomType bloomType;
 
   private long totalSeekDiligent, totalSeekLazy;
   
@@ -128,7 +126,7 @@ public class TestSeekOptimizations {
   }
 
   public TestSeekOptimizations(Compression.Algorithm comprAlgo,
-      StoreFile.BloomType bloomType) {
+      BloomType bloomType) {
     this.comprAlgo = comprAlgo;
     this.bloomType = bloomType;
   }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java Fri Dec 21 23:39:30 2012
@@ -208,7 +208,7 @@ public class TestSplitTransaction {
     TEST_UTIL.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 1);
     try {
       for (Store store : this.parent.stores.values()) {
-        store.getFamily().setBloomFilterType(StoreFile.BloomType.ROW);
+        store.getFamily().setBloomFilterType(BloomType.ROW);
       }
       testWholesomeSplit();
     } finally {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Fri Dec 21 23:39:30 2012
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -99,7 +99,7 @@ public class TestStoreFile extends HBase
             .build();
     writeStoreFile(writer);
     checkHalfHFile(new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE));
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE));
   }
 
   private void writeStoreFile(final StoreFile.Writer writer) throws IOException {
@@ -142,7 +142,7 @@ public class TestStoreFile extends HBase
             .build();
     writeStoreFile(writer);
     StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     StoreFile.Reader reader = hsf.createReader();
     // Split on a row, not in middle of row.  Midkey returned by reader
     // may be in middle of row.  Create new one with empty column and
@@ -154,7 +154,7 @@ public class TestStoreFile extends HBase
     // Make a reference
     Path refPath = StoreFile.split(fs, dir, hsf, midRow, true);
     StoreFile refHsf = new StoreFile(this.fs, refPath, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     // Now confirm that I can read from the reference and that it only gets
     // keys from top half of the file.
     HFileScanner s = refHsf.createReader().getScanner(false, false);
@@ -191,7 +191,7 @@ public class TestStoreFile extends HBase
 
     // Try to open store file from link
     StoreFile hsf = new StoreFile(this.fs, linkFilePath, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     assertTrue(hsf.isLink());
 
     // Now confirm that I can read from the link
@@ -284,10 +284,10 @@ public class TestStoreFile extends HBase
       topPath = StoreFile.split(this.fs, topDir, f, badmidkey, true);
       bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey, false);
       top = new StoreFile(this.fs, topPath, conf, cacheConf,
-          StoreFile.BloomType.NONE,
+          BloomType.NONE,
           NoOpDataBlockEncoder.INSTANCE).createReader();
       bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
-          StoreFile.BloomType.NONE,
+          BloomType.NONE,
           NoOpDataBlockEncoder.INSTANCE).createReader();
       bottomScanner = bottom.getScanner(false, false);
       int count = 0;
@@ -330,10 +330,10 @@ public class TestStoreFile extends HBase
       topPath = StoreFile.split(this.fs, topDir, f, badmidkey, true);
       bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey, false);
       top = new StoreFile(this.fs, topPath, conf, cacheConf,
-          StoreFile.BloomType.NONE,
+          BloomType.NONE,
           NoOpDataBlockEncoder.INSTANCE).createReader();
       bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
-          StoreFile.BloomType.NONE,
+          BloomType.NONE,
           NoOpDataBlockEncoder.INSTANCE).createReader();
       first = true;
       bottomScanner = bottom.getScanner(false, false);
@@ -433,7 +433,7 @@ public class TestStoreFile extends HBase
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL)
             .withFilePath(f)
-            .withBloomType(StoreFile.BloomType.ROW)
+            .withBloomType(BloomType.ROW)
             .withMaxKeyCount(2000)
             .withChecksumType(CKTYPE)
             .withBytesPerChecksum(CKBYTES)
@@ -510,8 +510,8 @@ public class TestStoreFile extends HBase
     int versions = 2;
 
     // run once using columns and once using rows
-    StoreFile.BloomType[] bt =
-      {StoreFile.BloomType.ROWCOL, StoreFile.BloomType.ROW};
+    BloomType[] bt =
+      {BloomType.ROWCOL, BloomType.ROW};
     int[] expKeys    = {rowCount*colCount, rowCount};
     // below line deserves commentary.  it is expected bloom false positives
     //  column = rowCount*2*colCount inserts
@@ -569,7 +569,7 @@ public class TestStoreFile extends HBase
               scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE);
           boolean shouldRowExist = i % 2 == 0;
           boolean shouldColExist = j % 2 == 0;
-          shouldColExist = shouldColExist || bt[x] == StoreFile.BloomType.ROW;
+          shouldColExist = shouldColExist || bt[x] == BloomType.ROW;
           if (shouldRowExist && shouldColExist) {
             if (!exists) falseNeg++;
           } else {
@@ -602,7 +602,7 @@ public class TestStoreFile extends HBase
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL)
             .withFilePath(f)
-            .withBloomType(StoreFile.BloomType.ROW)
+            .withBloomType(BloomType.ROW)
             .withMaxKeyCount(2000)
             .withChecksumType(CKTYPE)
             .withBytesPerChecksum(CKBYTES)
@@ -620,7 +620,7 @@ public class TestStoreFile extends HBase
     writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL)
             .withFilePath(f)
-            .withBloomType(StoreFile.BloomType.ROW)
+            .withBloomType(BloomType.ROW)
             .withMaxKeyCount(27244696)
             .build();
     assertTrue(writer.hasGeneralBloom());
@@ -631,7 +631,7 @@ public class TestStoreFile extends HBase
     writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL)
             .withFilePath(f)
-            .withBloomType(StoreFile.BloomType.ROW)
+            .withBloomType(BloomType.ROW)
             .withMaxKeyCount(Integer.MAX_VALUE)
             .withChecksumType(CKTYPE)
             .withBytesPerChecksum(CKBYTES)
@@ -735,7 +735,7 @@ public class TestStoreFile extends HBase
     writer.close();
 
     StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     StoreFile.Reader reader = hsf.createReader();
     StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
     TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
@@ -778,7 +778,7 @@ public class TestStoreFile extends HBase
     Path pathCowOff = new Path(baseDir, "123456789");
     StoreFile.Writer writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
     StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     LOG.debug(hsf.getPath().toString());
 
     // Read this file, we should see 3 misses
@@ -800,7 +800,7 @@ public class TestStoreFile extends HBase
     Path pathCowOn = new Path(baseDir, "123456788");
     writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
     hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
 
     // Read this file, we should see 3 hits
     reader = hsf.createReader();
@@ -816,13 +816,13 @@ public class TestStoreFile extends HBase
 
     // Let's read back the two files to ensure the blocks exactly match
     hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     StoreFile.Reader readerOne = hsf.createReader();
     readerOne.loadFileInfo();
     StoreFileScanner scannerOne = readerOne.getStoreFileScanner(true, true);
     scannerOne.seek(KeyValue.LOWESTKEY);
     hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     StoreFile.Reader readerTwo = hsf.createReader();
     readerTwo.loadFileInfo();
     StoreFileScanner scannerTwo = readerTwo.getStoreFileScanner(true, true);
@@ -853,7 +853,7 @@ public class TestStoreFile extends HBase
     conf.setBoolean("hbase.rs.evictblocksonclose", true);
     cacheConf = new CacheConfig(conf);
     hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     reader = hsf.createReader();
     reader.close(cacheConf.shouldEvictOnClose());
 
@@ -867,7 +867,7 @@ public class TestStoreFile extends HBase
     conf.setBoolean("hbase.rs.evictblocksonclose", false);
     cacheConf = new CacheConfig(conf);
     hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     reader = hsf.createReader();
     reader.close(cacheConf.shouldEvictOnClose());
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java Fri Dec 21 23:39:30 2012
@@ -42,8 +42,9 @@ import org.apache.hadoop.hbase.catalog.C
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
-import org.apache.hadoop.hbase.ipc.HBaseRPC;
+import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
 import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.HBaseServerRPC;
 import org.apache.hadoop.hbase.ipc.ProtocolSignature;
 import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.ipc.RpcServer;
@@ -122,7 +123,7 @@ public class TestTokenAuthentication {
         throw new IllegalArgumentException("Failed resolve of " + initialIsa);
       }
 
-      this.rpcServer = HBaseRPC.getServer(TokenServer.class, this,
+      this.rpcServer = HBaseServerRPC.getServer(TokenServer.class, this,
           new Class<?>[]{AuthenticationProtos.AuthenticationService.Interface.class},
           initialIsa.getHostName(), // BindAddress is IP we got for this server.
           initialIsa.getPort(),
@@ -377,7 +378,7 @@ public class TestTokenAuthentication {
         c.set(HConstants.CLUSTER_ID, clusterId.toString());
         AuthenticationProtos.AuthenticationService.BlockingInterface proxy =
             (AuthenticationProtos.AuthenticationService.BlockingInterface)
-            HBaseRPC.waitForProxy(BlockingAuthenticationService.class,
+            HBaseClientRPC.waitForProxy(BlockingAuthenticationService.class,
                 BlockingAuthenticationService.VERSION,
                 server.getAddress(), c,
                 HConstants.DEFAULT_HBASE_CLIENT_RPC_MAXATTEMPTS,

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java?rev=1425198&r1=1425197&r2=1425198&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java Fri Dec 21 23:39:30 2012
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.PleaseHol
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 
 /**
  * A command-line utility that reads, writes, and verifies data. Unlike
@@ -67,7 +67,7 @@ public class LoadTestTool extends Abstra
       "<verify_percent>[:<#threads=" + DEFAULT_NUM_THREADS + ">]";
 
   private static final String OPT_USAGE_BLOOM = "Bloom filter type, one of " +
-      Arrays.toString(StoreFile.BloomType.values());
+      Arrays.toString(BloomType.values());
 
   private static final String OPT_USAGE_COMPRESSION = "Compression type, " +
       "one of " + Arrays.toString(Compression.Algorithm.values());
@@ -115,7 +115,7 @@ public class LoadTestTool extends Abstra
   private DataBlockEncoding dataBlockEncodingAlgo;
   private boolean encodeInCacheOnly;
   private Compression.Algorithm compressAlgo;
-  private StoreFile.BloomType bloomType;
+  private BloomType bloomType;
 
   // Writer options
   private int numWriterThreads = DEFAULT_NUM_THREADS;
@@ -317,7 +317,7 @@ public class LoadTestTool extends Abstra
 
     String bloomStr = cmd.getOptionValue(OPT_BLOOM);
     bloomType = bloomStr == null ? null :
-        StoreFile.BloomType.valueOf(bloomStr);
+        BloomType.valueOf(bloomStr);
   }
 
   public void initTestTable() throws IOException {