Posted to commits@hbase.apache.org by jm...@apache.org on 2013/02/14 13:58:21 UTC

svn commit: r1446147 [29/35] - in /hbase/branches/hbase-7290v2: ./ bin/ conf/ dev-support/ hbase-client/ hbase-common/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ hbase-common/src/...

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java Thu Feb 14 12:58:12 2013
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -88,7 +88,7 @@ public class TestCacheOnWrite {
   private static final int NUM_KV = 25000;
   private static final int INDEX_BLOCK_SIZE = 512;
   private static final int BLOOM_BLOCK_SIZE = 4096;
-  private static final BloomType BLOOM_TYPE = StoreFile.BloomType.ROWCOL;
+  private static final BloomType BLOOM_TYPE = BloomType.ROWCOL;
   private static final ChecksumType CKTYPE = ChecksumType.CRC32;
   private static final int CKBYTES = 512;
 
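The change above is mechanical: BloomType is now a top-level enum in org.apache.hadoop.hbase.regionserver rather than a nested type of StoreFile. A minimal sketch of the rename, using only names visible in the hunk:

    // After this commit: the StoreFile qualifier disappears from both
    // the import and the constant reference.
    import org.apache.hadoop.hbase.regionserver.BloomType;

    class BloomTypeUsage {
      static final BloomType BLOOM_TYPE = BloomType.ROWCOL; // was StoreFile.BloomType.ROWCOL
    }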

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java Thu Feb 14 12:58:12 2013
@@ -135,6 +135,11 @@ public class TestCachedBlockQueue extend
               return null;
             }
 
+            @Override
+            public BlockType getBlockType() {
+              return BlockType.DATA;
+            }
+
           }, accessTime, false);
     }
   }
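
The added override reflects an interface change: the cached-block abstraction this stub implements now requires a getBlockType() accessor, and plain data blocks answer BlockType.DATA. A self-contained illustration of the pattern with stand-in types (not the real HBase interfaces, which carry more methods):

    // Stand-ins only: adding an abstract accessor to the cached-item
    // interface forces every test stub to supply it.
    enum BlockType { DATA, LEAF_INDEX, BLOOM_CHUNK }

    interface Cacheable {
      long heapSize();
      BlockType getBlockType(); // newly required: each cached item reports its type
    }

    class DummyDataBlock implements Cacheable {
      public long heapSize() { return 0L; }
      public BlockType getBlockType() { return BlockType.DATA; } // as in the stub above
    }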

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java Thu Feb 14 12:58:12 2013
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -43,7 +43,7 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
-/**W
+/**
  * Make sure we always cache important block types, such as index blocks, as
  * long as we have a block cache, even though block caching might be disabled
  * for the column family.
@@ -80,8 +80,6 @@ public class TestForceCacheImportantBloc
   public static Collection<Object[]> parameters() {
     // HFile versions
     return Arrays.asList(new Object[][] {
-        new Object[] { new Integer(1), false },
-        new Object[] { new Integer(1), true },
         new Object[] { new Integer(2), false },
         new Object[] { new Integer(2), true }
     });
@@ -114,7 +112,7 @@ public class TestForceCacheImportantBloc
 
     for (int i = 0; i < NUM_ROWS; ++i) {
       Get get = new Get(Bytes.toBytes("row" + i));
-      region.get(get, null);
+      region.get(get);
     }
 
     List<BlockCategory> importantBlockCategories =
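
Two independent cleanups meet in this file: the HFile version 1 rows leave the parameter matrix (matching the testReaderV1 deletions below), and HRegion.get() loses its second argument, the row-lock id, consistent with the LockRow/UnlockRow protobuf removals in MockRegionServer further down. The call-site change in isolation:

    // Before: reads threaded an explicit row lock through (null here).
    region.get(get, null);
    // After: the read path takes only the Get.
    region.get(get);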

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java Thu Feb 14 12:58:12 2013
@@ -347,21 +347,25 @@ public class TestHFile extends HBaseTest
     assertTrue(Compression.Algorithm.LZ4.ordinal() == 4);
   }
 
+  // This can't be an anonymous class because the compiler will not generate
+  // a nullary constructor for it.
+  static class CustomKeyComparator extends KeyComparator {
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
+        int l2) {
+      return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
+    }
+    @Override
+    public int compare(byte[] o1, byte[] o2) {
+      return compare(o1, 0, o1.length, o2, 0, o2.length);
+    }
+  }
+
   public void testComparator() throws IOException {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
     Path mFile = new Path(ROOT_DIR, "meta.tfile");
     FSDataOutputStream fout = createFSOutput(mFile);
-    KeyComparator comparator = new KeyComparator() {
-      @Override
-      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
-          int l2) {
-        return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
-      }
-      @Override
-      public int compare(byte[] o1, byte[] o2) {
-        return compare(o1, 0, o1.length, o2, 0, o2.length);
-      }
-    };
+    KeyComparator comparator = new CustomKeyComparator();
     Writer writer = HFile.getWriterFactory(conf, cacheConf)
         .withOutputStream(fout)
         .withBlockSize(minBlockSize)
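
The comment in the hunk is the whole story: an anonymous inner class declared in an instance method gets a constructor that takes the enclosing instance, so it cannot be re-instantiated from its class name alone, which HFile presumably does when it re-creates the comparator recorded in the file. A standalone demonstration in plain JDK terms (stand-in names, no HBase types):

    import java.util.Comparator;

    public class NullaryCtorDemo {
      static class Named implements Comparator<byte[]> {
        public int compare(byte[] a, byte[] b) { return 0; }
      }

      Comparator<byte[]> anon = new Comparator<byte[]>() { // instance context
        public int compare(byte[] a, byte[] b) { return 0; }
      };

      public static void main(String[] args) throws Exception {
        Named.class.newInstance(); // fine: implicit no-arg constructor
        Class<?> c = new NullaryCtorDemo().anon.getClass();
        System.out.println(c.getDeclaredConstructors()[0]); // takes the enclosing instance
        try {
          c.newInstance();
        } catch (InstantiationException e) {
          System.out.println("no nullary constructor: " + e);
        }
      }
    }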

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java Thu Feb 14 12:58:12 2013
@@ -258,41 +258,6 @@ public class TestHFileBlock {
   }
 
   @Test
-  public void testReaderV1() throws IOException {
-    for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
-      for (boolean pread : new boolean[] { false, true }) {
-        byte[] block = createTestV1Block(algo);
-        Path path = new Path(TEST_UTIL.getDataTestDir(),
-          "blocks_v1_"+ algo);
-        LOG.info("Creating temporary file at " + path);
-        FSDataOutputStream os = fs.create(path);
-        int totalSize = 0;
-        int numBlocks = 50;
-        for (int i = 0; i < numBlocks; ++i) {
-          os.write(block);
-          totalSize += block.length;
-        }
-        os.close();
-
-        FSDataInputStream is = fs.open(path);
-        HFileBlock.FSReader hbr = new HFileBlock.FSReaderV1(is, algo,
-            totalSize);
-        HFileBlock b;
-        int numBlocksRead = 0;
-        long pos = 0;
-        while (pos < totalSize) {
-          b = hbr.readBlockData(pos, block.length, uncompressedSizeV1, pread);
-          b.sanityCheck();
-          pos += block.length;
-          numBlocksRead++;
-        }
-        assertEquals(numBlocks, numBlocksRead);
-        is.close();
-      }
-    }
-  }
-
-  @Test
   public void testReaderV2() throws IOException {
     for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
       for (boolean pread : new boolean[] { false, true }) {

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java Thu Feb 14 12:58:12 2013
@@ -169,41 +169,6 @@ public class TestHFileBlockCompatibility
   }
 
   @Test
-  public void testReaderV1() throws IOException {
-    for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
-      for (boolean pread : new boolean[] { false, true }) {
-        byte[] block = createTestV1Block(algo);
-        Path path = new Path(TEST_UTIL.getDataTestDir(),
-          "blocks_v1_"+ algo);
-        LOG.info("Creating temporary file at " + path);
-        FSDataOutputStream os = fs.create(path);
-        int totalSize = 0;
-        int numBlocks = 50;
-        for (int i = 0; i < numBlocks; ++i) {
-          os.write(block);
-          totalSize += block.length;
-        }
-        os.close();
-
-        FSDataInputStream is = fs.open(path);
-        HFileBlock.FSReader hbr = new HFileBlock.FSReaderV1(is, algo,
-            totalSize);
-        HFileBlock b;
-        int numBlocksRead = 0;
-        long pos = 0;
-        while (pos < totalSize) {
-          b = hbr.readBlockData(pos, block.length, uncompressedSizeV1, pread);
-          b.sanityCheck();
-          pos += block.length;
-          numBlocksRead++;
-        }
-        assertEquals(numBlocks, numBlocksRead);
-        is.close();
-      }
-    }
-  }
-
-  @Test
   public void testReaderV2() throws IOException {
     for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
       for (boolean pread : new boolean[] { false, true }) {

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java Thu Feb 14 12:58:12 2013
@@ -77,7 +77,7 @@ public class TestLruBlockCache {
     int n = 0;
     while(cache.getEvictionCount() == 0) {
       Thread.sleep(200);
-      assertTrue(n++ < 10);
+      assertTrue(n++ < 20);
     }
     System.out.println("Background Evictions run: " + cache.getEvictionCount());
 
@@ -663,6 +663,11 @@ public class TestLruBlockCache {
     @Override
     public void serialize(ByteBuffer destination) {
     }
+    
+    @Override
+    public BlockType getBlockType() {
+      return BlockType.DATA;
+    }
 
   }
 
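Two small changes here: the eviction wait doubles its budget (20 polls of 200 ms, roughly 4 seconds) to reduce flakiness on slow machines, and the test's dummy cached item gains the same getBlockType() override seen in TestCachedBlockQueue. The counting loop is equivalent to a deadline wait; a sketch of that form, using only names from the hunk:

    // Equivalent deadline-based wait (sketch): fail if no background
    // eviction is observed within the ~4 second budget.
    long deadline = System.currentTimeMillis() + 20 * 200;
    while (cache.getEvictionCount() == 0) {
      assertTrue("no eviction before deadline", System.currentTimeMillis() < deadline);
      Thread.sleep(200);
    }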

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java Thu Feb 14 12:58:12 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -109,20 +110,24 @@ public class TestScannerSelectionUsingTT
         HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()),
             conf, htd);
 
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
+    long version = 0; //make sure each new set of Put's have a new ts
     for (int iFile = 0; iFile < totalNumFiles; ++iFile) {
       if (iFile == NUM_EXPIRED_FILES) {
         Threads.sleepWithoutInterrupt(TTL_MS);
+        version += TTL_MS;
       }
 
       for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
         Put put = new Put(Bytes.toBytes("row" + iRow));
         for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
           put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
-              Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
+              ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
         }
         region.put(put);
       }
       region.flushcache();
+      version++;
     }
 
     Scan scan = new Scan();
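
The timestamp bookkeeping above deserves a gloss: version increments once per flushed file and jumps by TTL_MS across the sleep, so every file's cells carry a strictly increasing explicit timestamp instead of a server-assigned one. With NUM_EXPIRED_FILES pre-sleep files, the resulting layout is:

    // Cells flushed before the sleep: timestamp ts + i for file i
    // Cells flushed after the sleep:  timestamp >= ts + NUM_EXPIRED_FILES + TTL_MS
    put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
        ts + version, // explicit, unique per file
        Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));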

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/RandomTimeoutRpcEngine.java Thu Feb 14 12:58:12 2013
@@ -29,9 +29,9 @@ import java.net.SocketTimeoutException;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.hbase.ipc.VersionedProtocol;
 import javax.net.SocketFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.security.User;
 
 import com.google.protobuf.ServiceException;
@@ -41,33 +41,29 @@ import com.google.protobuf.ServiceExcept
  * Make sure to call setProtocolEngine to have the client actually use the RpcEngine
  * for a specific protocol
  */
-public class RandomTimeoutRpcEngine extends ProtobufRpcEngine {
+public class RandomTimeoutRpcEngine extends ProtobufRpcClientEngine {
 
   private static final Random RANDOM = new Random(System.currentTimeMillis());
   public static double chanceOfTimeout = 0.3;
   private static AtomicInteger invokations = new AtomicInteger();
-  
-  public VersionedProtocol getProxy(
-      Class<? extends VersionedProtocol> protocol, long clientVersion,
-      InetSocketAddress addr, User ticket,
-      Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException {
+
+  public RandomTimeoutRpcEngine(Configuration conf) {
+    super(conf);
+  }
+
+  @Override
+  public <T extends IpcProtocol> T getProxy(
+      Class<T> protocol, InetSocketAddress addr, Configuration conf, int rpcTimeout)
+  throws IOException {
     // Start up the requested-for proxy so we can pass-through calls to the underlying
     // RpcEngine.  Also instantiate and return our own proxy (RandomTimeoutInvocationHandler)
     // that will either throw exceptions or pass through to the underlying proxy.
-    VersionedProtocol actualProxy = super.getProxy(protocol, clientVersion, addr,
-      ticket, conf, factory, rpcTimeout);
+    T actualProxy = super.getProxy(protocol, addr, conf, rpcTimeout);
     RandomTimeoutInvocationHandler invoker =
       new RandomTimeoutInvocationHandler(actualProxy);
-    VersionedProtocol object = (VersionedProtocol)Proxy.newProxyInstance(
+    T wrapperProxy = (T)Proxy.newProxyInstance(
       protocol.getClassLoader(), new Class[]{protocol}, invoker);
-    return object;
-  }
-
-  /**
-   * Call this in order to set this class to run as the RpcEngine for the given protocol
-   */
-  public static void setProtocolEngine(Configuration conf, Class protocol) {
-    HBaseRPC.setProtocolEngine(conf, protocol, RandomTimeoutRpcEngine.class);
+    return wrapperProxy;
   }
 
   /**
@@ -78,9 +74,9 @@ public class RandomTimeoutRpcEngine exte
   }
 
   static class RandomTimeoutInvocationHandler implements InvocationHandler {
-    private VersionedProtocol actual = null;
+    private IpcProtocol actual = null;
 
-    public RandomTimeoutInvocationHandler(VersionedProtocol actual) {
+    public RandomTimeoutInvocationHandler(IpcProtocol actual) {
       this.actual = actual;
     }
 
@@ -96,4 +92,4 @@ public class RandomTimeoutRpcEngine exte
       return Proxy.getInvocationHandler(actual).invoke(proxy, method, args);
     }
   }
-}
+}
\ No newline at end of file
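
The rewrite tracks the RPC split in this branch: the base class is now ProtobufRpcClientEngine, protocols are typed as IpcProtocol rather than VersionedProtocol, and getProxy() is generic with the clientVersion, ticket, and factory parameters gone. The fault-injection trick itself is plain java.lang.reflect plumbing; a self-contained distillation with stand-in types (not the HBase classes):

    import java.io.IOException;
    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;
    import java.net.SocketTimeoutException;
    import java.util.Random;

    public class TimeoutProxyDemo {
      interface Ping { String ping() throws IOException; } // stand-in protocol

      public static void main(String[] args) {
        final Ping real = new Ping() { public String ping() { return "pong"; } };
        final Random rnd = new Random();
        Ping flaky = (Ping) Proxy.newProxyInstance(
            Ping.class.getClassLoader(), new Class<?>[] { Ping.class },
            new InvocationHandler() {
              public Object invoke(Object proxy, Method m, Object[] a) throws Throwable {
                if (rnd.nextFloat() < 0.3f) {         // ~30% of calls fail,
                  throw new SocketTimeoutException(); // like chanceOfTimeout above
                }
                return m.invoke(real, a);             // otherwise pass through
              }
            });
        try {
          System.out.println(flaky.ping());
        } catch (IOException e) {
          System.out.println("injected timeout: " + e);
        }
      }
    }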

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java Thu Feb 14 12:58:12 2013
@@ -24,22 +24,20 @@ import static org.junit.Assert.assertTru
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.ipc.VersionedProtocol;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.IpcProtocol;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse;
-import org.apache.hadoop.hbase.MediumTests;
 import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mortbay.log.Log;
@@ -69,34 +67,39 @@ public class TestDelayedRpc {
   private void testDelayedRpc(boolean delayReturnValue) throws Exception {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-
-    rpcServer = HBaseRPC.getServer(new TestRpcImpl(delayReturnValue),
+    TestRpcImpl instance = new TestRpcImpl(delayReturnValue);
+    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
 
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
-        rpcServer.getListenerAddress(), conf, 1000);
+    ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf);
+    try {
+      TestRpc client = clientEngine.getProxy(TestRpc.class,
+          rpcServer.getListenerAddress(), conf, 1000);
 
-    List<Integer> results = new ArrayList<Integer>();
+      List<Integer> results = new ArrayList<Integer>();
 
-    TestThread th1 = new TestThread(client, true, results);
-    TestThread th2 = new TestThread(client, false, results);
-    TestThread th3 = new TestThread(client, false, results);
-    th1.start();
-    Thread.sleep(100);
-    th2.start();
-    Thread.sleep(200);
-    th3.start();
-
-    th1.join();
-    th2.join();
-    th3.join();
-
-    assertEquals(UNDELAYED, results.get(0).intValue());
-    assertEquals(UNDELAYED, results.get(1).intValue());
-    assertEquals(results.get(2).intValue(), delayReturnValue ? DELAYED :
-        0xDEADBEEF);
+      TestThread th1 = new TestThread(client, true, results);
+      TestThread th2 = new TestThread(client, false, results);
+      TestThread th3 = new TestThread(client, false, results);
+      th1.start();
+      Thread.sleep(100);
+      th2.start();
+      Thread.sleep(200);
+      th3.start();
+
+      th1.join();
+      th2.join();
+      th3.join();
+
+      assertEquals(UNDELAYED, results.get(0).intValue());
+      assertEquals(UNDELAYED, results.get(1).intValue());
+      assertEquals(results.get(2).intValue(), delayReturnValue ? DELAYED :
+          0xDEADBEEF);
+    } finally {
+      clientEngine.close();
+    }
   }
 
   private static class ListAppender extends AppenderSkeleton {
@@ -133,40 +136,46 @@ public class TestDelayedRpc {
     log.setLevel(Level.WARN);
 
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-    rpcServer = HBaseRPC.getServer(new TestRpcImpl(true),
+    TestRpcImpl instance = new TestRpcImpl(true);
+    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
-        rpcServer.getListenerAddress(), conf, 1000);
 
-    Thread threads[] = new Thread[MAX_DELAYED_RPC + 1];
+    ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf);
+    try {
+      TestRpc client = clientEngine.getProxy(TestRpc.class,
+          rpcServer.getListenerAddress(), conf, 1000);
 
-    for (int i = 0; i < MAX_DELAYED_RPC; i++) {
-      threads[i] = new TestThread(client, true, null);
-      threads[i].start();
-    }
+      Thread threads[] = new Thread[MAX_DELAYED_RPC + 1];
 
-    /* No warnings till here. */
-    assertTrue(listAppender.getMessages().isEmpty());
+      for (int i = 0; i < MAX_DELAYED_RPC; i++) {
+        threads[i] = new TestThread(client, true, null);
+        threads[i].start();
+      }
 
-    /* This should give a warning. */
-    threads[MAX_DELAYED_RPC] = new TestThread(client, true, null);
-    threads[MAX_DELAYED_RPC].start();
+      /* No warnings till here. */
+      assertTrue(listAppender.getMessages().isEmpty());
 
-    for (int i = 0; i < MAX_DELAYED_RPC; i++) {
-      threads[i].join();
-    }
+      /* This should give a warning. */
+      threads[MAX_DELAYED_RPC] = new TestThread(client, true, null);
+      threads[MAX_DELAYED_RPC].start();
 
-    assertFalse(listAppender.getMessages().isEmpty());
-    assertTrue(listAppender.getMessages().get(0).startsWith(
-        "Too many delayed calls"));
+      for (int i = 0; i < MAX_DELAYED_RPC; i++) {
+        threads[i].join();
+      }
 
-    log.removeAppender(listAppender);
+      assertFalse(listAppender.getMessages().isEmpty());
+      assertTrue(listAppender.getMessages().get(0).startsWith(
+          "Too many delayed calls"));
+
+      log.removeAppender(listAppender);
+    } finally {
+      clientEngine.close();
+    }
   }
 
-  public interface TestRpc extends VersionedProtocol {
-    public static final long VERSION = 1L;
+  public interface TestRpc extends IpcProtocol {
     TestResponse test(TestArg delay);
   }
 
@@ -180,7 +189,6 @@ public class TestDelayedRpc {
     /**
      * @param delayReturnValue Should the response to the delayed call be set
      * at the start or the end of the delay.
-     * @param delay Amount of milliseconds to delay the call by
      */
     public TestRpcImpl(boolean delayReturnValue) {
       this.delayReturnValue = delayReturnValue;
@@ -213,22 +221,6 @@ public class TestDelayedRpc {
       responseBuilder.setResponse(0xDEADBEEF);
       return responseBuilder.build();
     }
-
-    @Override
-    public long getProtocolVersion(String arg0, long arg1) throws IOException {
-      return 0;
-    }
-
-    @Override
-    public ProtocolSignature getProtocolSignature(String protocol,
-        long clientVersion, int clientMethodsHash) throws IOException {
-      Method [] methods = this.getClass().getMethods();
-      int [] hashes = new int [methods.length];
-      for (int i = 0; i < methods.length; i++) {
-        hashes[i] = methods[i].hashCode();
-      }
-      return new ProtocolSignature(clientVersion, hashes);
-    }
   }
 
   private static class TestThread extends Thread {
@@ -263,35 +255,40 @@ public class TestDelayedRpc {
   public void testEndDelayThrowing() throws IOException {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-
-    rpcServer = HBaseRPC.getServer(new FaultyTestRpc(),
+    FaultyTestRpc instance = new FaultyTestRpc();
+    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
         new Class<?>[]{ TestRpcImpl.class },
         isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
     rpcServer.start();
 
-    TestRpc client = (TestRpc) HBaseRPC.getProxy(TestRpc.class, 0,
-        rpcServer.getListenerAddress(), conf, 1000);
+    ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf);
+    try {
+      TestRpc client = clientEngine.getProxy(TestRpc.class,
+          rpcServer.getListenerAddress(), conf, 1000);
 
-    int result = 0xDEADBEEF;
+      int result = 0xDEADBEEF;
 
-    try {
-      result = client.test(TestArg.newBuilder().setDelay(false).build()).getResponse();
-    } catch (Exception e) {
-      fail("No exception should have been thrown.");
-    }
-    assertEquals(result, UNDELAYED);
+      try {
+        result = client.test(TestArg.newBuilder().setDelay(false).build()).getResponse();
+      } catch (Exception e) {
+        fail("No exception should have been thrown.");
+      }
+      assertEquals(result, UNDELAYED);
 
-    boolean caughtException = false;
-    try {
-      result = client.test(TestArg.newBuilder().setDelay(true).build()).getResponse();
-    } catch(Exception e) {
-      // Exception thrown by server is enclosed in a RemoteException.
-      if (e.getCause().getMessage().contains(
-          "java.lang.Exception: Something went wrong"))
-        caughtException = true;
-      Log.warn(e);
+      boolean caughtException = false;
+      try {
+        result = client.test(TestArg.newBuilder().setDelay(true).build()).getResponse();
+      } catch(Exception e) {
+        // Exception thrown by server is enclosed in a RemoteException.
+        if (e.getCause().getMessage().contains(
+            "java.lang.Exception: Something went wrong"))
+          caughtException = true;
+        Log.warn(e);
+      }
+      assertTrue(caughtException);
+    } finally {
+      clientEngine.close();
     }
-    assertTrue(caughtException);
   }
 
   /**
@@ -312,18 +309,5 @@ public class TestDelayedRpc {
       // Client will receive the Exception, not this value.
       return TestResponse.newBuilder().setResponse(DELAYED).build();
     }
-
-    @Override
-    public long getProtocolVersion(String arg0, long arg1) throws IOException {
-      return 0;
-    }
-
-    @Override
-    public ProtocolSignature getProtocolSignature(String protocol,
-        long clientVersion, int clientMethodsHash) throws IOException {
-      return new ProtocolSignature(clientVersion, new int [] {});
-    }
   }
-
-}
-
+}
\ No newline at end of file
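
The same reshaping runs through this whole file: servers come from HBaseServerRPC.getServer(instance.getClass(), instance, ...), the protocol interface extends IpcProtocol (so the VERSION constant and the getProtocolVersion/getProtocolSignature boilerplate drop out), and the client engine is an object with a lifecycle rather than a static HBaseRPC call. The client-side skeleton, assembled from the hunks above:

    ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf);
    try {
      TestRpc client = clientEngine.getProxy(TestRpc.class,
          rpcServer.getListenerAddress(), conf, 1000);
      // ... drive the proxy ...
    } finally {
      clientEngine.close(); // release the engine's connections
    }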

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java Thu Feb 14 12:58:12 2013
@@ -22,6 +22,7 @@ import java.net.InetSocketAddress;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto;
@@ -52,7 +53,7 @@ public class TestProtoBufRpc {
   private static RpcServer server;
 
   public interface TestRpcService
-      extends TestProtobufRpcProto.BlockingInterface, VersionedProtocol {
+      extends TestProtobufRpcProto.BlockingInterface, IpcProtocol {
     public long VERSION = 1;
   }
 
@@ -76,32 +77,16 @@ public class TestProtoBufRpc {
         EmptyRequestProto request) throws ServiceException {
       throw new ServiceException("error", new IOException("error"));
     }
-
-    @Override
-    public long getProtocolVersion(String protocol, long clientVersion)
-        throws IOException {
-      // TODO Auto-generated method stub
-      return 0;
-    }
-
-    @Override
-    public ProtocolSignature getProtocolSignature(String protocol,
-        long clientVersion, int clientMethodsHash) throws IOException {
-      // TODO Auto-generated method stub
-      return null;
-    }
   }
 
   @Before
   public  void setUp() throws IOException { // Setup server for both protocols
     conf = new Configuration();
-    // Set RPC engine to protobuf RPC engine
-    HBaseRPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class);
 
     // Create server side implementation
     PBServerImpl serverImpl = new PBServerImpl();
     // Get RPC server for server side implementation
-    server = HBaseRPC.getServer(TestRpcService.class,serverImpl, 
+    server = HBaseServerRPC.getServer(TestRpcService.class,serverImpl,
         new Class[]{TestRpcService.class}, 
         ADDRESS, PORT, 10, 10, true, conf, 0);
     addr = server.getListenerAddress();
@@ -114,37 +99,29 @@ public class TestProtoBufRpc {
     server.stop();
   }
 
-  private static TestRpcService getClient() throws IOException {
-    // Set RPC engine to protobuf RPC engine
-    HBaseRPC.setProtocolEngine(conf, TestRpcService.class,
-        ProtobufRpcEngine.class);
-    return (TestRpcService) HBaseRPC.getProxy(TestRpcService.class, 0, 
-        addr, conf, 10000);
-  }
-
   @Test
   public void testProtoBufRpc() throws Exception {
-    TestRpcService client = getClient();
-    testProtoBufRpc(client);
-  }
-  
-  // separated test out so that other tests can call it.
-  public static void testProtoBufRpc(TestRpcService client) throws Exception {  
-    // Test ping method
-    EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
-    client.ping(null, emptyRequest);
-    
-    // Test echo method
-    EchoRequestProto echoRequest = EchoRequestProto.newBuilder()
-        .setMessage("hello").build();
-    EchoResponseProto echoResponse = client.echo(null, echoRequest);
-    Assert.assertEquals(echoResponse.getMessage(), "hello");
-    
-    // Test error method - error should be thrown as RemoteException
+    ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf);
     try {
-      client.error(null, emptyRequest);
-      Assert.fail("Expected exception is not thrown");
-    } catch (ServiceException e) {
+      TestRpcService client = clientEngine.getProxy(TestRpcService.class, addr, conf, 10000);
+      // Test ping method
+      EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
+      client.ping(null, emptyRequest);
+
+      // Test echo method
+      EchoRequestProto echoRequest = EchoRequestProto.newBuilder()
+          .setMessage("hello").build();
+      EchoResponseProto echoResponse = client.echo(null, echoRequest);
+      Assert.assertEquals(echoResponse.getMessage(), "hello");
+
+      // Test error method - error should be thrown as RemoteException
+      try {
+        client.error(null, emptyRequest);
+        Assert.fail("Expected exception is not thrown");
+      } catch (ServiceException e) {
+      }
+    } finally {
+      clientEngine.close();
     }
   }
-}
+}
\ No newline at end of file

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java Thu Feb 14 12:58:12 2013
@@ -18,8 +18,11 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -36,11 +39,14 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -59,7 +65,7 @@ public class TestImportExport {
   private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
   private static final byte[] QUAL = Bytes.toBytes("q");
   private static final String OUTPUT_DIR = "outputdir";
-  private static final String EXPORT_BATCHING = "100";
+  private static final String EXPORT_BATCH_SIZE = "100";
 
   private static MiniHBaseCluster cluster;
   private static long now = System.currentTimeMillis();
@@ -126,8 +132,7 @@ public class TestImportExport {
     String[] args = new String[] {
         EXPORT_TABLE,
         OUTPUT_DIR,
-	EXPORT_BATCHING,
-        "1000"
+        "1000", // max number of key versions per key to export
     };
 
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
@@ -196,6 +201,47 @@ public class TestImportExport {
     assertTrue(job.isSuccessful());
   }
 
+  /**
+   * Test export scanner batching
+   */
+  @Test
+  public void testExportScannerBatching() throws Exception {
+    String BATCH_TABLE = "exportWithBatch";
+    HTableDescriptor desc = new HTableDescriptor(BATCH_TABLE);
+    desc.addFamily(new HColumnDescriptor(FAMILYA)
+        .setMaxVersions(1)
+    );
+    UTIL.getHBaseAdmin().createTable(desc);
+    HTable t = new HTable(UTIL.getConfiguration(), BATCH_TABLE);
+
+    Put p = new Put(ROW1);
+    p.add(FAMILYA, QUAL, now, QUAL);
+    p.add(FAMILYA, QUAL, now+1, QUAL);
+    p.add(FAMILYA, QUAL, now+2, QUAL);
+    p.add(FAMILYA, QUAL, now+3, QUAL);
+    p.add(FAMILYA, QUAL, now+4, QUAL);
+    t.put(p);
+
+    String[] args = new String[] {
+        "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,  // added scanner batching arg.
+        BATCH_TABLE,
+        OUTPUT_DIR
+    };
+ 
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    Configuration conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+    assertEquals(conf.get(Export.EXPORT_BATCHING), EXPORT_BATCH_SIZE);
+
+    Job job = Export.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+    
+    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+    fs.delete(new Path(OUTPUT_DIR), true);
+  }
+
   @Test
   public void testWithDeletes() throws Exception {
     String EXPORT_TABLE = "exportWithDeletes";
@@ -215,7 +261,7 @@ public class TestImportExport {
     p.add(FAMILYA, QUAL, now+4, QUAL);
     t.put(p);
 
-    Delete d = new Delete(ROW1, now+3, null);
+    Delete d = new Delete(ROW1, now+3);
     t.delete(d);
     d = new Delete(ROW1);
     d.deleteColumns(FAMILYA, QUAL, now+2);
@@ -225,8 +271,7 @@ public class TestImportExport {
         "-D" + Export.RAW_SCAN + "=true",
         EXPORT_TABLE,
         OUTPUT_DIR,
-	EXPORT_BATCHING,
-        "1000"
+        "1000", // max number of key versions per key to export
     };
 
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
@@ -283,4 +328,99 @@ public class TestImportExport {
     assertEquals(now, res[6].getTimestamp());
     t.close();
   }
+
+  @Test
+  public void testWithFilter() throws Exception {
+    String EXPORT_TABLE = "exportSimpleCase_ImportWithFilter";
+    HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
+    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
+    UTIL.getHBaseAdmin().createTable(desc);
+    HTable exportTable = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
+
+    Put p = new Put(ROW1);
+    p.add(FAMILYA, QUAL, now, QUAL);
+    p.add(FAMILYA, QUAL, now + 1, QUAL);
+    p.add(FAMILYA, QUAL, now + 2, QUAL);
+    p.add(FAMILYA, QUAL, now + 3, QUAL);
+    p.add(FAMILYA, QUAL, now + 4, QUAL);
+    exportTable.put(p);
+
+    String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1000" };
+
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
+        cluster.getConfiguration()), args);
+    Configuration conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    Job job = Export.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+
+    String IMPORT_TABLE = "importWithFilter";
+    desc = new HTableDescriptor(IMPORT_TABLE);
+    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
+    UTIL.getHBaseAdmin().createTable(desc);
+
+    HTable importTable = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
+    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
+        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, OUTPUT_DIR,
+        "1000" };
+
+    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    job = Import.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+
+    // get the count of the source table for that time range
+    PrefixFilter filter = new PrefixFilter(ROW1);
+    int count = getCount(exportTable, filter);
+
+    Assert.assertEquals("Unexpected row count between export and import tables", count,
+      getCount(importTable, null));
+
+    // and then test that a broken command doesn't bork everything - easier here because we don't
+    // need to re-run the export job
+
+    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
+        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE,
+        OUTPUT_DIR, "1000" };
+
+    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    job = Import.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertFalse("Job succeeedd, but it had a non-instantiable filter!", job.isSuccessful());
+
+    // cleanup
+    exportTable.close();
+    importTable.close();
+  }
+
+  /**
+   * Count the number of keyvalues in the specified table that pass the given filter
+   * @param table the table to scan
+   * @param filter the filter to apply, or null to count all keyvalues
+   * @return the number of keyvalues found
+   * @throws IOException
+   */
+  private int getCount(HTable table, Filter filter) throws IOException {
+    Scan scan = new Scan();
+    scan.setFilter(filter);
+    ResultScanner results = table.getScanner(scan);
+    int count = 0;
+    for (Result res : results) {
+      count += res.size();
+    }
+    results.close();
+    return count;
+  }
 }
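
Net effect on the Export command line exercised by these tests: scanner batching is no longer a stray positional argument but a -D property keyed by Export.EXPORT_BATCHING, while the positional arguments stay table, output directory, and an optional max-versions count. The shape, as the new testExportScannerBatching builds it (table name is a placeholder):

    String[] args = new String[] {
        "-D" + Export.EXPORT_BATCHING + "=100", // scanner batch size via property
        "exportWithBatch",                      // table to export
        "outputdir",                            // output directory
        // "1000"                               // optional: max versions per key
    };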

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java Thu Feb 14 12:58:12 2013
@@ -22,6 +22,8 @@ import java.io.UnsupportedEncodingExcept
 import java.util.List;
 import java.util.ArrayList;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -51,6 +53,7 @@ import static org.junit.Assert.*;
 
 @Category(MediumTests.class)
 public class TestImportTsv {
+  private static final Log LOG = LogFactory.getLog(TestImportTsv.class);
 
   @Test
   public void testTsvParserSpecParsing() {
@@ -266,7 +269,6 @@ public class TestImportTsv {
     args = opts.getRemainingArgs();
 
     try {
-
       FileSystem fs = FileSystem.get(conf);
       FSDataOutputStream op = fs.create(new Path(inputFile), true);
       if (data == null) {
@@ -280,8 +282,11 @@ public class TestImportTsv {
       if (conf.get(ImportTsv.BULK_OUTPUT_CONF_KEY) == null) {
         HTableDescriptor desc = new HTableDescriptor(TAB);
         desc.addFamily(new HColumnDescriptor(FAM));
-        new HBaseAdmin(conf).createTable(desc);
+        HBaseAdmin admin = new HBaseAdmin(conf);
+        admin.createTable(desc);
+        admin.close();
       } else { // set the hbaseAdmin as we are not going through main()
+        LOG.info("set the hbaseAdmin");
         ImportTsv.createHbaseAdmin(conf);
       }
       Job job = ImportTsv.createSubmittableJob(conf, args);
@@ -323,6 +328,7 @@ public class TestImportTsv {
           // continue
         }
       }
+      table.close();
       assertTrue(verified);
     } finally {
       htu1.shutdownMiniMapReduceCluster();
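
Besides the new LOG, the changes in this file plug resource leaks: the throwaway HBaseAdmin and the verification HTable are now closed. Where an exception could escape between open and close, the safer shape (a sketch; the test keeps it linear) is:

    HBaseAdmin admin = new HBaseAdmin(conf);
    try {
      admin.createTable(desc);
    } finally {
      admin.close();
    }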

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Thu Feb 14 12:58:12 2013
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.*;
 import org.junit.experimental.categories.Category;
@@ -62,7 +62,9 @@ public class TestLoadIncrementalHFiles {
   public static String COMPRESSION =
     Compression.Algorithm.NONE.getName();
 
-  private static HBaseTestingUtility util = new HBaseTestingUtility();
+  static HBaseTestingUtility util = new HBaseTestingUtility();
+  //used by secure subclass
+  static boolean useSecure = false;
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
@@ -151,8 +153,7 @@ public class TestLoadIncrementalHFiles {
 
     HTable table = new HTable(util.getConfiguration(), TABLE);
     util.waitTableAvailable(TABLE, 30000);
-    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(
-      util.getConfiguration());
+    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration(), useSecure);
     loader.doBulkLoad(dir, table);
 
     assertEquals(expectedRows, util.countRows(table));
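
Widening util and adding the static useSecure flag let a secure-bulk-load variant reuse this test class wholesale: a subclass flips the flag before the cluster comes up, and every LoadIncrementalHFiles(conf, useSecure) call follows suit. A sketch with a hypothetical subclass name (only the flag and the constructor argument come from the hunks):

    public class TestSecureLoadIncrementalHFiles extends TestLoadIncrementalHFiles {
      @BeforeClass
      public static void setUpBeforeClass() throws Exception {
        // Hides the superclass @BeforeClass of the same name, so security can
        // be enabled in util's configuration before the cluster starts.
        useSecure = true;
        // ... secure setup for util, then start the mini cluster ...
      }
    }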

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java Thu Feb 14 12:58:12 2013
@@ -71,7 +71,9 @@ import com.google.protobuf.ServiceExcept
 public class TestLoadIncrementalHFilesSplitRecovery {
   final static Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class);
 
-  private static HBaseTestingUtility util;
+  static HBaseTestingUtility util;
+  //used by secure subclass
+  static boolean useSecure = false;
 
   final static int NUM_CFS = 10;
   final static byte[] QUAL = Bytes.toBytes("qual");
@@ -138,8 +140,7 @@ public class TestLoadIncrementalHFilesSp
    */
   private void populateTable(String table, int value) throws Exception {
     // create HFiles for different column families
-    LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
-        util.getConfiguration());
+    LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration(), useSecure);
     Path bulk1 = buildBulkFiles(table, value);
     HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
     lih.doBulkLoad(bulk1, t);
@@ -231,7 +232,7 @@ public class TestLoadIncrementalHFilesSp
     final AtomicInteger attmptedCalls = new AtomicInteger();
     final AtomicInteger failedCalls = new AtomicInteger();
     LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
-        util.getConfiguration()) {
+        util.getConfiguration(), useSecure) {
 
       protected List<LoadQueueItem> tryAtomicRegionLoad(final HConnection conn,
           byte[] tableName, final byte[] first, Collection<LoadQueueItem> lqis)
@@ -299,7 +300,7 @@ public class TestLoadIncrementalHFilesSp
     // files to fail when attempt to atomically import.  This is recoverable.
     final AtomicInteger attemptedCalls = new AtomicInteger();
     LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles(
-        util.getConfiguration()) {
+        util.getConfiguration(), useSecure) {
 
       protected void bulkLoadPhase(final HTable htable, final HConnection conn,
           ExecutorService pool, Deque<LoadQueueItem> queue,
@@ -340,7 +341,7 @@ public class TestLoadIncrementalHFilesSp
 
     final AtomicInteger countedLqis= new AtomicInteger();
     LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
-        util.getConfiguration()) {
+        util.getConfiguration(), useSecure) {
       protected List<LoadQueueItem> groupOrSplit(
           Multimap<ByteBuffer, LoadQueueItem> regionGroups,
           final LoadQueueItem item, final HTable htable,
@@ -372,7 +373,7 @@ public class TestLoadIncrementalHFilesSp
     setupTable(table, 10);
 
     LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
-        util.getConfiguration()) {
+        util.getConfiguration(), useSecure) {
       int i = 0;
 
       protected List<LoadQueueItem> groupOrSplit(

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java Thu Feb 14 12:58:12 2013
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.TreeMap;
+import java.util.concurrent.ConcurrentSkipListMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +36,6 @@ import org.apache.hadoop.hbase.client.Cl
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.ipc.ProtocolSignature;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
@@ -65,19 +65,15 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse;
 import org.apache.hadoop.hbase.regionserver.CompactionRequestor;
 import org.apache.hadoop.hbase.regionserver.FlushRequester;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -188,20 +184,6 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public long getProtocolVersion(String protocol, long clientVersion)
-      throws IOException {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  @Override
-  public ProtocolSignature getProtocolSignature(String protocol,
-      long clientVersion, int clientMethodsHash) throws IOException {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
   public boolean isStopped() {
     // TODO Auto-generated method stub
     return false;
@@ -252,7 +234,7 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public boolean removeFromOnlineRegions(String encodedRegionName, ServerName destination) {
+  public boolean removeFromOnlineRegions(HRegion r, ServerName destination) {
     // TODO Auto-generated method stub
     return false;
   }
@@ -290,12 +272,6 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public HLog getWAL() {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
   public CompactionRequestor getCompactionRequester() {
     // TODO Auto-generated method stub
     return null;
@@ -326,7 +302,7 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public Map<byte[], Boolean> getRegionsInTransitionInRS() {
+  public ConcurrentSkipListMap<byte[], Boolean> getRegionsInTransitionInRS() {
     // TODO Auto-generated method stub
     return null;
   }
@@ -351,6 +327,22 @@ class MockRegionServer implements AdminP
   }
 
   @Override
+  public MultiGetResponse multiGet(RpcController controller, MultiGetRequest requests)
+      throws ServiceException {
+    byte[] regionName = requests.getRegion().getValue().toByteArray();
+    Map<byte [], Result> m = this.gets.get(regionName);
+    MultiGetResponse.Builder builder = MultiGetResponse.newBuilder();
+    if (m != null) {
+      for (ClientProtos.Get get: requests.getGetList()) {
+        byte[] row = get.getRow().toByteArray();
+        builder.addResult(ProtobufUtil.toResult(m.get(row)));
+      }
+    }
+    return builder.build();
+  }
+
+
+  @Override
   public MutateResponse mutate(RpcController controller, MutateRequest request)
       throws ServiceException {
     // TODO Auto-generated method stub
@@ -386,20 +378,6 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public LockRowResponse lockRow(RpcController controller,
-      LockRowRequest request) throws ServiceException {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
-  public UnlockRowResponse unlockRow(RpcController controller,
-      UnlockRowRequest request) throws ServiceException {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
   public BulkLoadHFileResponse bulkLoadHFile(RpcController controller,
       BulkLoadHFileRequest request) throws ServiceException {
     // TODO Auto-generated method stub
@@ -407,13 +385,6 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public ExecCoprocessorResponse execCoprocessor(RpcController controller,
-      ExecCoprocessorRequest request) throws ServiceException {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
   public ClientProtos.CoprocessorServiceResponse execService(RpcController controller,
       ClientProtos.CoprocessorServiceRequest request) throws ServiceException {
     return null;
@@ -522,4 +493,10 @@ class MockRegionServer implements AdminP
     // TODO Auto-generated method stub
     return null;
   }
+
+  @Override
+  public HLog getWAL(HRegionInfo regionInfo) throws IOException {
+    // TODO Auto-generated method stub
+    return null;
+  }
 }
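
A note on the multiGet() stub added above: it answers protobuf get requests from results seeded ahead of time, keyed first by region name and then by row. Because byte[] has no value-based equals(), such lookup maps need an explicit comparator (HBase provides Bytes.BYTES_COMPARATOR for this). The following standalone sketch shows the same canned-answer pattern with plain JDK types; CannedStore and its methods are hypothetical stand-ins, not HBase API, and the real mock converts HBase Result objects through ProtobufUtil.toResult().

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class CannedStore {
  // byte[] lacks value equality, so key the maps with an explicit
  // lexicographic comparator.
  private static final Comparator<byte[]> BYTES = new Comparator<byte[]>() {
    @Override
    public int compare(byte[] a, byte[] b) {
      int n = Math.min(a.length, b.length);
      for (int i = 0; i < n; i++) {
        int d = (a[i] & 0xff) - (b[i] & 0xff);
        if (d != 0) return d;
      }
      return a.length - b.length;
    }
  };

  // region name -> (row -> canned answer)
  private final Map<byte[], Map<byte[], String>> gets =
      new TreeMap<byte[], Map<byte[], String>>(BYTES);

  public void seed(byte[] region, byte[] row, String value) {
    Map<byte[], String> m = gets.get(region);
    if (m == null) {
      m = new TreeMap<byte[], String>(BYTES);
      gets.put(region, m);
    }
    m.put(row, value);
  }

  // Mirrors the mock's multiGet(): one answer per requested row, in
  // request order; unknown regions yield an empty response.
  public List<String> multiGet(byte[] region, List<byte[]> rows) {
    List<String> out = new ArrayList<String>();
    Map<byte[], String> m = gets.get(region);
    if (m != null) {
      for (byte[] row : rows) {
        out.add(m.get(row));
      }
    }
    return out;
  }
}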

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java Thu Feb 14 12:58:12 2013
@@ -689,7 +689,7 @@ public class TestAssignmentManager {
     };
     ((ZooKeeperWatcher) zkw).registerListener(am);
     Mockito.doThrow(new InterruptedException()).when(recoverableZk)
-        .getChildren("/hbase/unassigned", null);
+        .getChildren("/hbase/region-in-transition", null);
     am.setWatcher((ZooKeeperWatcher) zkw);
     try {
       am.processDeadServersAndRegionsInTransition(null);

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java Thu Feb 14 12:58:12 2013
@@ -116,7 +116,7 @@ public class TestCatalogJanitor {
       this.ct = Mockito.mock(CatalogTracker.class);
       AdminProtocol hri = Mockito.mock(AdminProtocol.class);
       Mockito.when(this.ct.getConnection()).thenReturn(this.connection);
-      Mockito.when(ct.waitForMetaServerConnectionDefault()).thenReturn(hri);
+      Mockito.when(ct.waitForMetaServerConnection(Mockito.anyLong())).thenReturn(hri);
     }
 
     @Override
@@ -160,7 +160,7 @@ public class TestCatalogJanitor {
         this.ct.stop();
       }
       if (this.connection != null) {
-        HConnectionManager.deleteConnection(this.connection.getConfiguration(), true);
+        HConnectionManager.deleteConnection(this.connection.getConfiguration());
       }
     }
   }
@@ -299,6 +299,27 @@ public class TestCatalogJanitor {
     public boolean registerService(Service instance) {
       return false;
     }
+
+    @Override
+    public void deleteTable(byte[] tableName) throws IOException { }
+
+    @Override
+    public void modifyTable(byte[] tableName, HTableDescriptor descriptor) throws IOException { }
+
+    @Override
+    public void enableTable(byte[] tableName) throws IOException { }
+
+    @Override
+    public void disableTable(byte[] tableName) throws IOException { }
+
+    @Override
+    public void addColumn(byte[] tableName, HColumnDescriptor column) throws IOException { }
+
+    @Override
+    public void modifyColumn(byte[] tableName, HColumnDescriptor descriptor) throws IOException { }
+
+    @Override
+    public void deleteColumn(byte[] tableName, byte[] columnName) throws IOException { }
   }
 
   @Test

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java Thu Feb 14 12:58:12 2013
@@ -212,13 +212,13 @@ public class TestDistributedLogSplitting
     long endt = curt + waitTime;
     while (curt < endt) {
       if ((tot_wkr_task_resigned.get() + tot_wkr_task_err.get() +
-          tot_wkr_final_transistion_failed.get() + tot_wkr_task_done.get() +
+          tot_wkr_final_transition_failed.get() + tot_wkr_task_done.get() +
           tot_wkr_preempt_task.get()) == 0) {
         Thread.yield();
         curt = System.currentTimeMillis();
       } else {
         assertEquals(1, (tot_wkr_task_resigned.get() + tot_wkr_task_err.get() +
-            tot_wkr_final_transistion_failed.get() + tot_wkr_task_done.get() +
+            tot_wkr_final_transition_failed.get() + tot_wkr_task_done.get() +
             tot_wkr_preempt_task.get()));
         return;
       }
@@ -226,7 +226,7 @@ public class TestDistributedLogSplitting
     fail("none of the following counters went up in " + waitTime +
         " milliseconds - " +
         "tot_wkr_task_resigned, tot_wkr_task_err, " +
-        "tot_wkr_final_transistion_failed, tot_wkr_task_done, " +
+        "tot_wkr_final_transition_failed, tot_wkr_task_done, " +
         "tot_wkr_preempt_task");
   }
 

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java Thu Feb 14 12:58:12 2013
@@ -27,8 +27,9 @@ import java.net.SocketTimeoutException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.ipc.HBaseRPC;
+import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
 import org.apache.hadoop.hbase.MasterMonitorProtocol;
+import org.apache.hadoop.hbase.ipc.ProtobufRpcClientEngine;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;
 import org.junit.Test;
@@ -50,33 +51,36 @@ public class TestHMasterRPCException {
 
     ServerName sm = hm.getServerName();
     InetSocketAddress isa = new InetSocketAddress(sm.getHostname(), sm.getPort());
-    int i = 0;
-    //retry the RPC a few times; we have seen SocketTimeoutExceptions if we
-    //try to connect too soon. Retry on SocketTimeoutException.
-    while (i < 20) { 
-      try {
-        MasterMonitorProtocol inf = (MasterMonitorProtocol) HBaseRPC.getProxy(
-            MasterMonitorProtocol.class,  MasterMonitorProtocol.VERSION, isa, conf, 100 * 10);
-        inf.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
-        fail();
-      } catch (ServiceException ex) {
-        IOException ie = ProtobufUtil.getRemoteException(ex);
-        if (!(ie instanceof SocketTimeoutException)) {
-          if(ie.getMessage().startsWith(
-              "org.apache.hadoop.hbase.ipc.ServerNotRunningYetException: Server is not running yet")) {
-            return;
+    ProtobufRpcClientEngine engine = new ProtobufRpcClientEngine(conf);
+    try {
+      int i = 0;
+      //retry the RPC a few times; we have seen SocketTimeoutExceptions if we
+      //try to connect too soon. Retry on SocketTimeoutException.
+      while (i < 20) {
+        try {
+          MasterMonitorProtocol inf = engine.getProxy(
+              MasterMonitorProtocol.class, isa, conf, 100 * 10);
+          inf.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
+          fail();
+        } catch (ServiceException ex) {
+          IOException ie = ProtobufUtil.getRemoteException(ex);
+          if (!(ie instanceof SocketTimeoutException)) {
+            if(ie.getMessage().startsWith(
+                "org.apache.hadoop.hbase.ipc.ServerNotRunningYetException: Server is not running yet")) {
+              return;
+            }
+          } else {
+            System.err.println("Got SocketTimeoutException. Will retry. ");
           }
-        } else {
-          System.err.println("Got SocketTimeoutException. Will retry. ");
+        } catch (Throwable t) {
+          fail("Unexpected throwable: " + t);
         }
-      } catch (Throwable t) {
-        fail("Unexpected throwable: " + t);
+        Thread.sleep(100);
+        i++;
       }
-      Thread.sleep(100);
-      i++;
+      fail();
+    } finally {
+      engine.close();
     }
-    fail();
   }
-
-}
-
+}
\ No newline at end of file
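
The rewrite above wraps the retry loop in try/finally so the ProtobufRpcClientEngine is always closed, while keeping the existing policy from the comment: a SocketTimeoutException while the master is still coming up is retried after a short sleep, and any other failure ends the test. A minimal, self-contained sketch of that retry-on-timeout idiom follows; the Retry helper and its signature are hypothetical, not HBase API.

import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.concurrent.Callable;

public final class Retry {
  private Retry() {}

  // Retries the call on SocketTimeoutException (server not up yet);
  // any other exception propagates to the caller immediately.
  public static <T> T onSocketTimeout(Callable<T> call, int attempts, long sleepMs)
      throws Exception {
    SocketTimeoutException last = null;
    for (int i = 0; i < attempts; i++) {
      try {
        return call.call();
      } catch (SocketTimeoutException ste) {
        last = ste;            // expected while the server warms up
        Thread.sleep(sleepMs); // back off briefly, then try again
      }
    }
    throw new IOException("gave up after " + attempts + " attempts", last);
  }
}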

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java Thu Feb 14 12:58:12 2013
@@ -198,7 +198,7 @@ public class TestMasterNoCluster {
 
       @Override
       CatalogTracker createCatalogTracker(ZooKeeperWatcher zk,
-          Configuration conf, Abortable abortable, int defaultTimeout)
+          Configuration conf, Abortable abortable)
       throws IOException {
         // Insert a mock for the connection used by the CatalogTracker.  Any
         // regionserver should do.  Use TESTUTIL.getConfiguration rather than
@@ -207,7 +207,7 @@ public class TestMasterNoCluster {
         HConnection connection =
           HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(),
             rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO);
-        return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout);
+        return new CatalogTracker(zk, conf, connection, abortable);
       }
     };
     master.start();
@@ -284,7 +284,7 @@ public class TestMasterNoCluster {
 
       @Override
       CatalogTracker createCatalogTracker(ZooKeeperWatcher zk,
-          Configuration conf, Abortable abortable, int defaultTimeout)
+          Configuration conf, Abortable abortable)
       throws IOException {
         // Insert a mock for the connection used by the CatalogTracker.   Use
         // TESTUTIL.getConfiguration rather than the conf from the master; the
@@ -293,7 +293,7 @@ public class TestMasterNoCluster {
         HConnection connection =
           HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(),
             rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO);
-        return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout);
+        return new CatalogTracker(zk, conf, connection, abortable);
       }
     };
     master.start();

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java Thu Feb 14 12:58:12 2013
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.MediumTes
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogTask;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.master.SplitLogManager.Task;
 import org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch;
 import org.apache.hadoop.hbase.regionserver.TestMasterAddressManager.NodeCreationListener;
@@ -124,7 +125,7 @@ public class TestSplitLogManager {
     Mockito.when(sm.isServerOnline(Mockito.any(ServerName.class))).thenReturn(true);
     Mockito.when(master.getServerManager()).thenReturn(sm);
 
-    to = 4000;
+    to = 6000;
     conf.setInt("hbase.splitlog.manager.timeout", to);
     conf.setInt("hbase.splitlog.manager.unassigned.timeout", 2 * to);
     conf.setInt("hbase.splitlog.manager.timeoutmonitor.period", 100);
@@ -142,7 +143,8 @@ public class TestSplitLogManager {
     public long eval();
   }
 
-  private void waitForCounter(final AtomicLong ctr, long oldval, long newval, long timems) {
+  private void waitForCounter(final AtomicLong ctr, long oldval, long newval, long timems)
+      throws Exception {
     Expr e = new Expr() {
       public long eval() {
         return ctr.get();
@@ -152,23 +154,17 @@ public class TestSplitLogManager {
     return;
   }
 
-  private void waitForCounter(Expr e, long oldval, long newval,
-      long timems) {
-    long curt = System.currentTimeMillis();
-    long endt = curt + timems;
-    while (curt < endt) {
-      if (e.eval() == oldval) {
-        try {
-          Thread.sleep(10);
-        } catch (InterruptedException eintr) {
+  private void waitForCounter(final Expr e, final long oldval, long newval, long timems)
+      throws Exception {
+
+    TEST_UTIL.waitFor(timems, 10, new Waiter.Predicate<Exception>() {
+        @Override
+        public boolean evaluate() throws Exception {
+            return (e.eval() != oldval);
         }
-        curt = System.currentTimeMillis();
-      } else {
-        assertEquals(newval, e.eval());
-        return;
-      }
-    }
-    assertTrue(false);
+    });
+
+    assertEquals(newval, e.eval());
   }
 
   private String submitTaskAndWait(TaskBatch batch, String name)
@@ -220,7 +216,7 @@ public class TestSplitLogManager {
 
     slm = new SplitLogManager(zkw, conf, stopper, master, DUMMY_MASTER, null);
     slm.finishInitialization();
-    waitForCounter(tot_mgr_orphan_task_acquired, 0, 1, 100);
+    waitForCounter(tot_mgr_orphan_task_acquired, 0, 1, to/2);
     Task task = slm.findOrCreateOrphanTask(tasknode);
     assertTrue(task.isOrphan());
     waitForCounter(tot_mgr_heartbeat, 0, 1, to/2);
@@ -508,7 +504,7 @@ public class TestSplitLogManager {
     LOG.info("testVanishingTaskZNode");
 
     conf.setInt("hbase.splitlog.manager.unassigned.timeout", 0);
-
+    conf.setInt("hbase.splitlog.manager.timeoutmonitor.period", 1000);
     slm = new SplitLogManager(zkw, conf, stopper, master, DUMMY_MASTER, null);
     slm.finishInitialization();
     FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -537,7 +533,7 @@ public class TestSplitLogManager {
       // remove the task znode, to finish the distributed log splitting
       ZKUtil.deleteNode(zkw, znode);
       waitForCounter(tot_mgr_get_data_nonode, 0, 1, 30000);
-      waitForCounter(tot_mgr_log_split_batch_success, 0, 1, 1000);
+      waitForCounter(tot_mgr_log_split_batch_success, 0, 1, to/2);
       assertTrue(fs.exists(logFile));
     } finally {
       if (thread != null) {
@@ -550,4 +546,3 @@ public class TestSplitLogManager {
   }
 
 }
-
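
The waitForCounter() change above swaps a hand-rolled sleep loop for HBase's Waiter abstraction: TEST_UTIL.waitFor(timeout, interval, predicate) polls a Waiter.Predicate until it returns true or the timeout expires, and the test then asserts the expected counter value. A standalone sketch of that polling helper, assuming nothing beyond the JDK (the Wait class and its method are hypothetical names):

import java.util.concurrent.Callable;

public final class Wait {
  private Wait() {}

  // Polls the predicate every intervalMs until it holds or timeoutMs
  // elapses; returns whether it ever held.
  public static boolean until(long timeoutMs, long intervalMs,
      Callable<Boolean> predicate) throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      if (predicate.call()) {
        return true;
      }
      Thread.sleep(intervalMs);
    }
    return predicate.call(); // one final check at the deadline
  }
}

Keeping the assertion outside the helper, as the refactored waitForCounter() does, yields a precise failure message (expected versus actual counter value) instead of a bare timeout.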

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java Thu Feb 14 12:58:12 2013
@@ -48,11 +48,8 @@ public class TestCleanerChore {
   @After
   public void cleanup() throws Exception {
     // delete and recreate the test directory, ensuring a clean test dir between tests
-    Path testDir = UTIL.getDataTestDir();
-    FileSystem fs = UTIL.getTestFileSystem();
-    fs.delete(testDir, true);
-    fs.mkdirs(testDir);
-  }
+    UTIL.cleanupTestDir();
+  }
 
   @Test
   public void testSavesFilesOnRequest() throws Exception {
@@ -95,8 +93,10 @@ public class TestCleanerChore {
     // create the directory layout in the directory to clean
     Path parent = new Path(testDir, "parent");
     Path child = new Path(parent, "child");
+    Path emptyChild = new Path(parent, "emptyChild");
     Path file = new Path(child, "someFile");
     fs.mkdirs(child);
+    fs.mkdirs(emptyChild);
     // touch a new file
     fs.create(file).close();
     // also create a file in the top level directory
@@ -225,6 +225,66 @@ public class TestCleanerChore {
     Mockito.reset(spy);
   }
 
+  /**
+   * The cleaner runs in a loop: it first checks whether all the files under a directory can be
+   * deleted, and only if they all can does it try to delete the directory itself. However, a file
+   * may be added to that directory after the original check. This test ensures that in that case we
+   * don't accidentally delete the directory and don't get spurious IOExceptions.
+   * <p>
+   * This was from HBASE-7465.
+   * @throws Exception on failure
+   */
+  @Test
+  public void testNoExceptionFromDirectoryWithRacyChildren() throws Exception {
+    Stoppable stop = new StoppableImplementation();
+    // need to use a local util so we don't break the rest of the tests that run on the local FS,
+    // which gets hosed when we start to use a minicluster.
+    HBaseTestingUtility localUtil = new HBaseTestingUtility();
+    Configuration conf = localUtil.getConfiguration();
+    final Path testDir = UTIL.getDataTestDir();
+    final FileSystem fs = UTIL.getTestFileSystem();
+    LOG.debug("Writing test data to: " + testDir);
+    String confKey = "hbase.test.cleaner.delegates";
+    conf.set(confKey, AlwaysDelete.class.getName());
+
+    AllValidPaths chore = new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey);
+    // spy on the delegate to ensure that we don't check isFileDeletable for directories
+    AlwaysDelete delegate = (AlwaysDelete) chore.cleanersChain.get(0);
+    AlwaysDelete spy = Mockito.spy(delegate);
+    chore.cleanersChain.set(0, spy);
+
+    // create the directory layout in the directory to clean
+    final Path parent = new Path(testDir, "parent");
+    Path file = new Path(parent, "someFile");
+    fs.mkdirs(parent);
+    // touch a new file
+    fs.create(file).close();
+    assertTrue("Test file didn't get created.", fs.exists(file));
+    final Path racyFile = new Path(parent, "addedFile");
+
+    // when we attempt to delete the original file, add another file in the same directory
+    Mockito.doAnswer(new Answer<Boolean>() {
+      @Override
+      public Boolean answer(InvocationOnMock invocation) throws Throwable {
+        fs.create(racyFile).close();
+        FSUtils.logFileSystemState(fs, testDir, LOG);
+        return (Boolean) invocation.callRealMethod();
+      }
+    }).when(spy).isFileDeletable(Mockito.any(Path.class));
+
+    // attempt to delete the directory; this should fail because a file was added mid-iteration
+    if (chore.checkAndDeleteDirectory(parent)) {
+      throw new Exception(
+          "Reported success deleting directory, should have failed when adding file mid-iteration");
+    }
+
+    // make sure all the directories + added file exist, but the original file is deleted
+    assertTrue("Added file unexpectedly deleted", fs.exists(racyFile));
+    assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
+    assertFalse("Original file unexpectedly retained", fs.exists(file));
+    Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(Path.class));
+  }
+
   private static class AllValidPaths extends CleanerChore<BaseHFileCleanerDelegate> {
 
     public AllValidPaths(String name, Stoppable s, Configuration conf, FileSystem fs,
@@ -252,4 +312,4 @@ public class TestCleanerChore {
       return false;
     }
   }
-}
\ No newline at end of file
+}
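
The new testNoExceptionFromDirectoryWithRacyChildren test pins down the HBASE-7465 behavior its javadoc describes: between checking a directory's children and removing the directory, another writer may add a file, and the cleaner must treat the resulting failed delete as "skip for now" rather than as an error. A compact sketch of that tolerant check-then-delete step, using plain java.io.File instead of the Hadoop FileSystem API (DirCleaner is a hypothetical helper, not the CleanerChore implementation):

import java.io.File;

public final class DirCleaner {
  private DirCleaner() {}

  // Returns true only if every listed child was removed and the
  // directory itself could then be deleted.
  public static boolean checkAndDelete(File dir) {
    File[] children = dir.listFiles();
    if (children == null) {
      return false; // vanished, or not a directory
    }
    for (File child : children) {
      if (!child.delete()) {
        return false; // non-empty subdirectory, or a file added since listing
      }
    }
    // File.delete() on a directory that picked up a new child since the
    // listing returns false rather than throwing, mirroring the "no
    // spurious IOException" behavior the test asserts.
    return dir.delete();
  }
}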

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java Thu Feb 14 12:58:12 2013
@@ -88,7 +88,7 @@ public class TestHFileCleaner {
         + status.getAccessTime();
   }
 
-  @Test
+  @Test(timeout = 60 * 1000)
   public void testHFileCleaning() throws Exception {
     final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
     String prefix = "someHFileThatWouldBeAUUID";

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java Thu Feb 14 12:58:12 2013
@@ -63,7 +63,7 @@ public class TestLogsCleaner {
   public void testLogCleaning() throws Exception{
     Configuration conf = TEST_UTIL.getConfiguration();
     // set TTL
-    long ttl = 2000;
+    long ttl = 10000;
     conf.setLong("hbase.master.logcleaner.ttl", ttl);
     conf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
     Replication.decorateMasterConfiguration(conf);

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java Thu Feb 14 12:58:12 2013
@@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.client.Ge
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.coprocessor.Exec;
-import org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
@@ -292,25 +290,4 @@ public class TestProtobufUtil {
     Scan scan = ProtobufUtil.toScan(proto);
     assertEquals(scanBuilder.build(), ProtobufUtil.toScan(scan));
   }
-
-  /**
-   * Test basic Exec conversions.
-   *
-   * @throws IOException
-   */
-  @Test
-  public void testExec() throws IOException {
-    ClientProtos.Exec.Builder execBuilder = ClientProtos.Exec.newBuilder();
-    execBuilder.setRow(ByteString.copyFromUtf8("row"));
-    execBuilder.setProtocolName(ColumnAggregationEndpoint.class.getName());
-    execBuilder.setMethodName("sum");
-    execBuilder.addParameter(ProtobufUtil.toParameter(Bytes.toBytes("f")));
-    execBuilder.addParameter(ProtobufUtil.toParameter(Bytes.toBytes("c")));
-
-    ClientProtos.Exec proto = execBuilder.build();
-    Exec exec = ProtobufUtil.toExec(proto);
-    execBuilder = ClientProtos.Exec.newBuilder(ProtobufUtil.toExec(exec));
-    execBuilder.clearProperty(); // remove properties added by default
-    assertEquals(proto, execBuilder.build());
-  }
 }

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java Thu Feb 14 12:58:12 2013
@@ -101,7 +101,7 @@ public class CreateRandomStoreFile {
             + Arrays.toString(Compression.Algorithm.values()));
     options.addOption(BLOOM_FILTER_OPTION, "bloom_filter", true,
         "Bloom filter type, one of "
-            + Arrays.toString(StoreFile.BloomType.values()));
+            + Arrays.toString(BloomType.values()));
     options.addOption(BLOCK_SIZE_OPTION, "block_size", true,
         "HFile block size");
     options.addOption(BLOOM_BLOCK_SIZE_OPTION, "bloom_block_size", true,
@@ -162,9 +162,9 @@ public class CreateRandomStoreFile {
           cmdLine.getOptionValue(COMPRESSION_OPTION));
     }
 
-    StoreFile.BloomType bloomType = StoreFile.BloomType.NONE;
+    BloomType bloomType = BloomType.NONE;
     if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
-      bloomType = StoreFile.BloomType.valueOf(cmdLine.getOptionValue(
+      bloomType = BloomType.valueOf(cmdLine.getOptionValue(
           BLOOM_FILTER_OPTION));
     }
 

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java Thu Feb 14 12:58:12 2013
@@ -367,8 +367,6 @@ public class DataBlockEncodingTool {
   /**
    * Check decompress performance of a given algorithm and print it.
    * @param algorithm Compression algorithm.
-   * @param compressorCodec Compressor to be tested.
-   * @param decompressorCodec Decompressor of the same algorithm.
    * @param name Name of algorithm.
    * @param buffer Buffer to be compressed.
    * @param offset Position of the beginning of the data.
@@ -584,7 +582,7 @@ public class DataBlockEncodingTool {
     CacheConfig cacheConf = new CacheConfig(conf);
     FileSystem fs = FileSystem.get(conf);
     StoreFile hsf = new StoreFile(fs, path, conf, cacheConf,
-        StoreFile.BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
 
     StoreFile.Reader reader = hsf.createReader();
     reader.loadFileInfo();

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java Thu Feb 14 12:58:12 2013
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 
 /**
  * Test seek performance for encoded data blocks. Read an HFile and do several

Modified: hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java Thu Feb 14 12:58:12 2013
@@ -146,7 +146,7 @@ public class HFileReadWriteTest {
   private HFileDataBlockEncoder dataBlockEncoder =
       NoOpDataBlockEncoder.INSTANCE;
 
-  private StoreFile.BloomType bloomType = StoreFile.BloomType.NONE;
+  private BloomType bloomType = BloomType.NONE;
   private int blockSize;
   private Compression.Algorithm compression = Compression.Algorithm.NONE;
 
@@ -178,7 +178,7 @@ public class HFileReadWriteTest {
         + Arrays.toString(Compression.Algorithm.values()) +
         Workload.MERGE.onlyUsedFor());
     options.addOption(BLOOM_FILTER_OPTION, true, "Bloom filter type, one of "
-        + Arrays.toString(StoreFile.BloomType.values()) +
+        + Arrays.toString(BloomType.values()) +
         Workload.MERGE.onlyUsedFor());
     options.addOption(BLOCK_SIZE_OPTION, true, "HFile block size" +
         Workload.MERGE.onlyUsedFor());
@@ -239,7 +239,7 @@ public class HFileReadWriteTest {
     }
 
     if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
-      bloomType = StoreFile.BloomType.valueOf(cmdLine.getOptionValue(
+      bloomType = BloomType.valueOf(cmdLine.getOptionValue(
           BLOOM_FILTER_OPTION));
     }
 
@@ -407,7 +407,7 @@ public class HFileReadWriteTest {
       Scan scan = new Scan();
 
       // Include deletes
-      scanner = new StoreScanner(store, store.scanInfo, scan, scanners,
+      scanner = new StoreScanner(store, store.getScanInfo(), scan, scanners,
           ScanType.MAJOR_COMPACT, Long.MIN_VALUE, Long.MIN_VALUE);
 
       ArrayList<KeyValue> kvs = new ArrayList<KeyValue>();
@@ -468,7 +468,7 @@ public class HFileReadWriteTest {
     // We are passing the ROWCOL Bloom filter type, but StoreFile will still
     // use the Bloom filter type specified in the HFile.
     return new StoreFile(fs, filePath, conf, cacheConf,
-        StoreFile.BloomType.ROWCOL, dataBlockEncoder);
+        BloomType.ROWCOL, dataBlockEncoder);
   }
 
   public static int charToHex(int c) {