Posted to commits@hbase.apache.org by jm...@apache.org on 2013/08/22 23:36:30 UTC

svn commit: r1516622 - in /hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase: ./ backup/example/ client/ coprocessor/ io/encoding/ io/hfile/ master/ regionserver/ regionserver/handler/ security/token/

Author: jmhsieh
Date: Thu Aug 22 21:36:29 2013
New Revision: 1516622

URL: http://svn.apache.org/r1516622
Log:
HBASE-9274 After HBASE-8408 applied, temporary test files are being left in /tmp/hbase-<user>

Added an HBaseTestingUtility#createLocalHTU for tests, where the hbase root dir is set to a unique test data dir to prevent detritus.
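
For illustration, a minimal sketch (not part of this commit; the class name is hypothetical) of a test picking up the new factory method. Everything the utility writes then lands under the per-test data dir instead of /tmp/hbase-<user>:

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class ExampleLocalHTUTest {
  // createLocalHTU() rebases hbase.rootdir onto a unique test data dir.
  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();

  @Test
  public void rootDirPointsAtTestDataDir() {
    String root = TEST_UTIL.getConfiguration().get(HConstants.HBASE_DIR);
    Assert.assertEquals(TEST_UTIL.getDataTestDir().toString(), root);
  }
}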

Modified:
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java Thu Aug 22 21:36:29 2013
@@ -65,17 +65,16 @@ public abstract class HBaseTestCase exte
   protected static final char LAST_CHAR = 'z';
   protected static final String PUNCTUATION = "~`@#$%^&*()-_+=:;',.<>/?[]{}|";
   protected static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
-  protected String START_KEY;
+  protected String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
   protected static final int MAXVERSIONS = 3;
 
   protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();
 
-  public volatile Configuration conf;
+  public volatile Configuration conf = HBaseConfiguration.create();
 
   /** constructor */
   public HBaseTestCase() {
     super();
-    init();
   }
 
   /**
@@ -83,12 +82,6 @@ public abstract class HBaseTestCase exte
    */
   public HBaseTestCase(String name) {
     super(name);
-    init();
-  }
-
-  private void init() {
-    conf = HBaseConfiguration.create();
-    START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
   }
 
   /**
@@ -246,7 +239,7 @@ public abstract class HBaseTestCase exte
    * @throws IOException
    * @return count of what we added.
    */
-  protected static long addContent(final HRegion r, final byte [] columnFamily)
+  public static long addContent(final HRegion r, final byte [] columnFamily)
   throws IOException {
     return addContent(r, columnFamily, null);
   }
@@ -265,7 +258,7 @@ public abstract class HBaseTestCase exte
     return addContent(updater, columnFamily, START_KEY_BYTES, null);
   }
 
-  protected static long addContent(final Incommon updater, final String family,
+  public static long addContent(final Incommon updater, final String family,
       final String column) throws IOException {
     return addContent(updater, family, column, START_KEY_BYTES, null);
   }
@@ -287,7 +280,7 @@ public abstract class HBaseTestCase exte
     return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
   }
 
-  protected static long addContent(final Incommon updater, final String family,
+  public static long addContent(final Incommon updater, final String family,
                                    final String column, final byte [] startKeyBytes,
                                    final byte [] endKey) throws IOException {
     return addContent(updater, family, column, startKeyBytes, endKey, -1);
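
The three addContent overloads above go from protected to public so that tests which no longer extend HBaseTestCase (see the JUnit 4 conversions further down in this commit) can load a region through a plain static call. A minimal sketch under that assumption, with a hypothetical helper name:

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;

public class AddContentSketch {
  /** Fills every row of one column family and flushes, as the converted tests do. */
  static void fillAndFlush(HRegion region) throws IOException {
    // addContent(HRegion, byte[]) is now reachable without subclassing HBaseTestCase.
    long cellsAdded = HBaseTestCase.addContent(region, Bytes.toBytes("colfamily31"));
    System.out.println("added " + cellsAdded + " cells");
    region.flushcache();
  }
}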

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Thu Aug 22 21:36:29 2013
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.RegionStates;
@@ -80,7 +81,9 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl;
+import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.tool.Canary;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
@@ -93,7 +96,6 @@ import org.apache.hadoop.hbase.zookeeper
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
 import org.apache.hadoop.hbase.zookeeper.ZKConfig;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.hadoop.hbase.tool.Canary;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -218,6 +220,32 @@ public class HBaseTestingUtility extends
   }
 
   /**
+   * Create an HBaseTestingUtility where all tmp files are written to the local test data dir.
+   * It is needed to properly base FSUtils.getRootDir so that it drops temp files in the proper
+   * test dir.  Use this when you aren't using a Mini HDFS cluster.
+   * @return HBaseTestingUtility that uses local fs for temp files.
+   */
+  public static HBaseTestingUtility createLocalHTU() {
+    Configuration c = HBaseConfiguration.create();
+    return createLocalHTU(c);
+  }
+
+  /**
+   * Create an HBaseTestingUtility where all tmp files are written to the local test data dir.
+   * It is needed to properly base FSUtils.getRootDir so that it drops temp files in the proper
+   * test dir.  Use this when you aren't using a Mini HDFS cluster.
+   * @param c Configuration (will be modified)
+   * @return HBaseTestingUtility that uses local fs for temp files.
+   */
+  public static HBaseTestingUtility createLocalHTU(Configuration c) {
+    HBaseTestingUtility htu = new HBaseTestingUtility(c);
+    String dataTestDir = htu.getDataTestDir().toString();
+    htu.getConfiguration().set(HConstants.HBASE_DIR, dataTestDir);
+    LOG.debug("Setting " + HConstants.HBASE_DIR + " to " + dataTestDir);
+    return htu;
+  }
+
+  /**
    * Returns this classes's instance of {@link Configuration}.  Be careful how
    * you use the returned Configuration since {@link HConnection} instances
    * can be shared.  The Map of HConnections is keyed by the Configuration.  If
@@ -1439,6 +1467,81 @@ public class HBaseTestingUtility extends
     getHBaseAdmin().deleteTable(tableName);
   }
 
+  // ==========================================================================
+  // Canned table and table descriptor creation
+  // TODO replace HBaseTestCase
+  
+  public final static byte [] fam1 = Bytes.toBytes("colfamily11");
+  public final static byte [] fam2 = Bytes.toBytes("colfamily21");
+  public final static byte [] fam3 = Bytes.toBytes("colfamily31");
+  public static final byte[][] COLUMNS = {fam1, fam2, fam3};
+  private static final int MAXVERSIONS = 3;
+  
+  private static final char FIRST_CHAR = 'a';
+  public static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
+
+
+  /**
+   * Create a table descriptor for a table of name <code>name</code>, with
+   * {@link COLUMNS} for families.
+   * @param name Name to give table.
+   * @param versions How many versions to allow per column.
+   * @return Table descriptor.
+   */
+  public HTableDescriptor createTableDescriptor(final String name,
+      final int minVersions, final int versions, final int ttl, boolean keepDeleted) {
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
+      htd.addFamily(new HColumnDescriptor(cfName)
+          .setMinVersions(minVersions)
+          .setMaxVersions(versions)
+          .setKeepDeletedCells(keepDeleted)
+          .setBlockCacheEnabled(false)
+          .setTimeToLive(ttl)
+      );
+    }
+    return htd;
+  }
+
+  /**
+   * Create a table descriptor for a table of name <code>name</code>, with
+   * {@link COLUMNS} for families and default versioning and TTL settings.
+   * @param name Name to give table.
+   * @return Table descriptor.
+   */
+  public HTableDescriptor createTableDescriptor(final String name) {
+    return createTableDescriptor(name,  HColumnDescriptor.DEFAULT_MIN_VERSIONS,
+        MAXVERSIONS, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
+  }
+
+  /**
+   * Create an HRegion that writes to the local tmp dirs
+   * @param desc
+   * @param startKey
+   * @param endKey
+   * @return
+   * @throws IOException
+   */
+  public HRegion createLocalHRegion(HTableDescriptor desc, byte [] startKey,
+      byte [] endKey)
+  throws IOException {
+    HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey);
+    return createLocalHRegion(hri, desc);
+  }
+
+  /**
+   * Create an HRegion that writes to the local tmp dirs
+   * @param info
+   * @param desc
+   * @return
+   * @throws IOException
+   */
+  public HRegion createLocalHRegion(HRegionInfo info, HTableDescriptor desc) throws IOException {
+    return HRegion.createHRegion(info, getDataTestDir(), getConfiguration(), desc);
+  }
+  
+  //
+  // ==========================================================================
 
   /**
    * Provide an existing table name to truncate
@@ -1985,6 +2088,34 @@ public class HBaseTestingUtility extends
   }
 
   /**
+   * Create a stubbed out RegionServerServices, mainly for getting FS.
+   */
+  public RegionServerServices createMockRegionServerService() throws IOException { 
+    return createMockRegionServerService((ServerName)null);
+  }
+
+  /**
+   * Create a stubbed out RegionServerServices, mainly for getting FS.
+   * This version is used by TestTokenAuthentication
+   */
+  public RegionServerServices createMockRegionServerService(RpcServerInterface rpc) throws IOException {
+    final MockRegionServerServices rss = new MockRegionServerServices(getZooKeeperWatcher());
+    rss.setFileSystem(getTestFileSystem());
+    rss.setRpcServer(rpc);
+    return rss;
+  }
+
+  /**
+   * Create a stubbed out RegionServerServices, mainly for getting FS.
+   * This version is used by TestOpenRegionHandler
+   */
+  public RegionServerServices createMockRegionServerService(ServerName name) throws IOException {
+    final MockRegionServerServices rss = new MockRegionServerServices(getZooKeeperWatcher(), name);
+    rss.setFileSystem(getTestFileSystem());
+    return rss;
+  }
+
+  /**
    * Switches the logger for the given class to DEBUG level.
    *
    * @param clazz  The class for which to switch to debug logging.
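
The canned descriptor and local-region helpers added above replace HBaseTestCase#createTableDescriptor and HBaseTestCase#createNewHRegion in the converted tests further down (TestKeepDeletes, TestMinVersions, TestColumnSeeking). A sketch of how they pair up, using a hypothetical table name; the region's files land under getDataTestDir() rather than /tmp/hbase-<user>:

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegion;

public class LocalRegionSketch {
  static HRegion openRegion(HBaseTestingUtility htu) throws IOException {
    // 0 min versions, 3 max versions, no TTL, keep deleted cells; families fam1/fam2/fam3.
    HTableDescriptor htd = htu.createTableDescriptor("exampleTable", 0, 3,
        HConstants.FOREVER, true);
    // Null start/end keys give the region the whole key range.
    return htu.createLocalHRegion(htd, null, null);
  }
}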

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java Thu Aug 22 21:36:29 2013
@@ -66,7 +66,7 @@ import org.mockito.stubbing.Answer;
 public class TestZooKeeperTableArchiveClient {
 
   private static final Log LOG = LogFactory.getLog(TestZooKeeperTableArchiveClient.class);
-  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+  private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
   private static final String STRING_TABLE_NAME = "test";
   private static final byte[] TEST_FAM = Bytes.toBytes("fam");
   private static final byte[] TABLE_NAME = Bytes.toBytes(STRING_TABLE_NAME);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java Thu Aug 22 21:36:29 2013
@@ -18,21 +18,21 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -51,10 +50,9 @@ import org.junit.experimental.categories
  */
 @Category(MediumTests.class)
 public class TestSnapshotMetadata {
-
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static final int NUM_RS = 2;
-  private static final String STRING_TABLE_NAME = "testtable";
+  private static final String STRING_TABLE_NAME = "TestSnapshotMetadata";
 
   private static final String MAX_VERSIONS_FAM_STR = "fam_max_columns";
   private static final byte[] MAX_VERSIONS_FAM = Bytes.toBytes(MAX_VERSIONS_FAM_STR);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java Thu Aug 22 21:36:29 2013
@@ -19,6 +19,14 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
@@ -33,7 +41,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -43,6 +50,7 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -54,14 +62,17 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.util.PairOfSameType;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 import org.mockito.Mockito;
 
 @Category(SmallTests.class)
-public class TestCoprocessorInterface extends HBaseTestCase {
+public class TestCoprocessorInterface {
+  @Rule public TestName name = new TestName();
   static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
-  private static final HBaseTestingUtility TEST_UTIL =
-    new HBaseTestingUtility();
+  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
   static final Path DIR = TEST_UTIL.getDataTestDir();
 
   private static class CustomScanner implements RegionScanner {
@@ -262,17 +273,17 @@ public class TestCoprocessorInterface ex
     }
   }
 
+  @Test
   public void testSharedData() throws IOException {
-    TableName tableName =
-        TableName.valueOf("testtable");
+    TableName tableName = TableName.valueOf(name.getMethodName());
     byte [][] families = { fam1, fam2, fam3 };
 
     Configuration hc = initSplit();
-    HRegion region = initHRegion(tableName, getName(), hc,
+    HRegion region = initHRegion(tableName, name.getMethodName(), hc,
       new Class<?>[]{}, families);
 
     for (int i = 0; i < 3; i++) {
-      addContent(region, fam3);
+      HBaseTestCase.addContent(region, fam3);
       region.flushcache();
     }
 
@@ -340,16 +351,16 @@ public class TestCoprocessorInterface ex
     assertFalse(o3 == o2);
   }
 
+  @Test
   public void testCoprocessorInterface() throws IOException {
-    TableName tableName =
-        TableName.valueOf("testtable");
+    TableName tableName = TableName.valueOf(name.getMethodName());
     byte [][] families = { fam1, fam2, fam3 };
 
     Configuration hc = initSplit();
-    HRegion region = initHRegion(tableName, getName(), hc,
+    HRegion region = initHRegion(tableName, name.getMethodName(), hc,
       new Class<?>[]{CoprocessorImpl.class}, families);
     for (int i = 0; i < 3; i++) {
-      addContent(region, fam3);
+      HBaseTestCase.addContent(region, fam3);
       region.flushcache();
     }
 
@@ -402,6 +413,7 @@ public class TestCoprocessorInterface ex
     // is secretly loaded at OpenRegionHandler. we don't really
     // start a region server here, so just manually create cphost
     // and set it to region.
+    Configuration conf = TEST_UTIL.getConfiguration();
     RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
     r.setCoprocessorHost(host);
 
@@ -499,7 +511,4 @@ public class TestCoprocessorInterface ex
     return regions;
   }
 
-}
-
-
-
+}
\ No newline at end of file
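
TestCoprocessorInterface above, and several of the conversions that follow, replace JUnit 3's TestCase#getName() with JUnit 4's TestName rule to derive per-method table names. A minimal standalone sketch of that pattern (class name hypothetical):

import static org.junit.Assert.assertEquals;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

public class TestNameRuleSketch {
  // JUnit 4 replacement for TestCase#getName(): exposes the currently running method's name.
  @Rule public TestName name = new TestName();

  @Test
  public void tableNameTracksTestMethod() {
    // Mirrors TableName.valueOf(name.getMethodName()) in the converted tests.
    assertEquals("tableNameTracksTestMethod", name.getMethodName());
  }
}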

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java Thu Aug 22 21:36:29 2013
@@ -27,7 +27,6 @@ import java.util.Map;
 
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
@@ -66,7 +65,7 @@ public class TestEncodedSeekers {
   private static final int NUM_HFILES = 4;
   private static final int NUM_ROWS_PER_FLUSH = NUM_ROWS / NUM_HFILES;
 
-  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+  private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
   private final DataBlockEncoding encoding;
   private final boolean encodeOnDisk;
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java Thu Aug 22 21:36:29 2013
@@ -69,8 +69,7 @@ public class TestCacheOnWrite {
 
   private static final Log LOG = LogFactory.getLog(TestCacheOnWrite.class);
 
-  private static final HBaseTestingUtility TEST_UTIL =
-    new HBaseTestingUtility();
+  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
   private Configuration conf;
   private CacheConfig cacheConf;
   private FileSystem fs;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java Thu Aug 22 21:36:29 2013
@@ -16,15 +16,11 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -52,8 +48,7 @@ import org.junit.runners.Parameterized.P
 @RunWith(Parameterized.class)
 public class TestForceCacheImportantBlocks {
 
-  private final HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility();
+  private final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   private static final String TABLE = "myTable";
   private static final String CF = "myCF";

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java Thu Aug 22 21:36:29 2013
@@ -65,9 +65,9 @@ public class TestOpenedRegionHandler {
   @Before
   public void setUp() throws Exception {
     conf = HBaseConfiguration.create();
-    TEST_UTIL = new HBaseTestingUtility(conf);
+    TEST_UTIL = HBaseTestingUtility.createLocalHTU(conf);
   }
-  
+
   @After
   public void tearDown() throws Exception {
     // Stop the cluster
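
TestOpenedRegionHandler here, and TestMemStore below, hand an existing Configuration to the new createLocalHTU(Configuration) overload; per its javadoc the Configuration is modified in place, so the caller's conf ends up with hbase.rootdir pointing at the test data dir. A small sketch (the tuning key is illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;

public class LocalHTUWithConfSketch {
  static HBaseTestingUtility build() {
    Configuration conf = HBaseConfiguration.create();
    conf.setInt("hbase.regionserver.handler.count", 5);  // illustrative test-specific tweak
    HBaseTestingUtility htu = HBaseTestingUtility.createLocalHTU(conf);
    // The same conf object now carries hbase.rootdir = <test data dir>.
    System.out.println(conf.get(HConstants.HBASE_DIR));
    return htu;
  }
}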

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java Thu Aug 22 21:36:29 2013
@@ -16,7 +16,11 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.regionserver;
-
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -33,7 +37,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -59,8 +62,10 @@ import org.apache.hadoop.hbase.filter.Co
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 
 
 /**
@@ -68,8 +73,9 @@ import org.junit.experimental.categories
  * and HRegion.append
  */
 @Category(MediumTests.class) // Starts 100 threads
-public class TestAtomicOperation extends HBaseTestCase {
+public class TestAtomicOperation {
   static final Log LOG = LogFactory.getLog(TestAtomicOperation.class);
+  @Rule public TestName name = new TestName();
 
   HRegion region = null;
   private HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -86,20 +92,6 @@ public class TestAtomicOperation extends
   static final byte [] row = Bytes.toBytes("rowA");
   static final byte [] row2 = Bytes.toBytes("rowB");
 
-  /**
-   * @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
-   */
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
-    EnvironmentEdgeManagerTestHelper.reset();
-  }
-
   //////////////////////////////////////////////////////////////////////////////
   // New tests that doesn't spin up a mini cluster but rather just test the
   // individual code pieces in the HRegion. 
@@ -110,8 +102,9 @@ public class TestAtomicOperation extends
    * More tests in
    * @see org.apache.hadoop.hbase.client.TestFromClientSide#testAppend()
    */
+  @Test
   public void testAppend() throws IOException {
-    initHRegion(tableName, getName(), fam1);
+    initHRegion(tableName, name.getMethodName(), fam1);
     String v1 = "Ultimate Answer to the Ultimate Question of Life,"+
     " The Universe, and Everything";
     String v2 = " is... 42.";
@@ -131,11 +124,12 @@ public class TestAtomicOperation extends
   /**
    * Test multi-threaded increments.
    */
+  @Test
   public void testIncrementMultiThreads() throws IOException {
 
     LOG.info("Starting test testIncrementMultiThreads");
     // run a with mixed column families (1 and 3 versions)
-    initHRegion(tableName, getName(), new int[] {1,3}, fam1, fam2);
+    initHRegion(tableName, name.getMethodName(), new int[] {1,3}, fam1, fam2);
 
     // create 100 threads, each will increment by its own quantity
     int numThreads = 100;
@@ -202,6 +196,7 @@ public class TestAtomicOperation extends
     }
     HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
     Path path = new Path(DIR + callingMethod);
+    FileSystem fs = TEST_UTIL.getTestFileSystem();
     if (fs.exists(path)) {
       if (!fs.delete(path, true)) {
         throw new IOException("Failed delete of " + path);
@@ -250,10 +245,11 @@ public class TestAtomicOperation extends
     }
   }
 
+  @Test
   public void testAppendMultiThreads() throws IOException {
     LOG.info("Starting test testAppendMultiThreads");
     // run a with mixed column families (1 and 3 versions)
-    initHRegion(tableName, getName(), new int[] {1,3}, fam1, fam2);
+    initHRegion(tableName, name.getMethodName(), new int[] {1,3}, fam1, fam2);
 
     int numThreads = 100;
     int opsPerThread = 100;
@@ -310,10 +306,11 @@ public class TestAtomicOperation extends
   /**
    * Test multi-threaded row mutations.
    */
+  @Test
   public void testRowMutationMultiThreads() throws IOException {
 
     LOG.info("Starting test testRowMutationMultiThreads");
-    initHRegion(tableName, getName(), fam1);
+    initHRegion(tableName, name.getMethodName(), fam1);
 
     // create 10 threads, each will alternate between adding and
     // removing a column
@@ -397,10 +394,11 @@ public class TestAtomicOperation extends
   /**
    * Test multi-threaded region mutations.
    */
+  @Test
   public void testMultiRowMutationMultiThreads() throws IOException {
 
     LOG.info("Starting test testMultiRowMutationMultiThreads");
-    initHRegion(tableName, getName(), fam1);
+    initHRegion(tableName, name.getMethodName(), fam1);
 
     // create 10 threads, each will alternate between adding and
     // removing a column
@@ -518,6 +516,7 @@ public class TestAtomicOperation extends
    * 
    * Moved into TestAtomicOperation from its original location, TestHBase7051
    */
+  @Test
   public void testPutAndCheckAndPutInParallel() throws Exception {
 
     final String tableName = "testPutAndCheckAndPut";

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java Thu Aug 22 21:36:29 2013
@@ -36,14 +36,16 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 
 @Category(SmallTests.class)
 public class TestColumnSeeking {
+  @Rule public TestName name = new TestName();
 
-  private final static HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility();
+  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   static final Log LOG = LogFactory.getLog(TestColumnSeeking.class);
 
@@ -52,7 +54,7 @@ public class TestColumnSeeking {
   public void testDuplicateVersions() throws IOException {
     String family = "Family";
     byte[] familyBytes = Bytes.toBytes("Family");
-    TableName table = TableName.valueOf("TestDuplicateVersions");
+    TableName table = TableName.valueOf(name.getMethodName());
 
     HColumnDescriptor hcd =
         new HColumnDescriptor(familyBytes).setMaxVersions(1000);
@@ -60,9 +62,8 @@ public class TestColumnSeeking {
     HTableDescriptor htd = new HTableDescriptor(table);
     htd.addFamily(hcd);
     HRegionInfo info = new HRegionInfo(table, null, null, false);
-    HRegion region =
-        HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), TEST_UTIL
-            .getConfiguration(), htd);
+    // Set this so that the archiver writes to the temp dir as well.
+    HRegion region = TEST_UTIL.createLocalHRegion(info, htd);
     try {
       List<String> rows = generateRandomWords(10, "row");
       List<String> allColumns = generateRandomWords(10, "column");
@@ -166,8 +167,7 @@ public class TestColumnSeeking {
   public void testReseeking() throws IOException {
     String family = "Family";
     byte[] familyBytes = Bytes.toBytes("Family");
-    TableName table =
-        TableName.valueOf("TestSingleVersions");
+    TableName table = TableName.valueOf(name.getMethodName());
 
     HTableDescriptor htd = new HTableDescriptor(table);
     HColumnDescriptor hcd = new HColumnDescriptor(family);
@@ -175,9 +175,7 @@ public class TestColumnSeeking {
     htd.addFamily(hcd);
 
     HRegionInfo info = new HRegionInfo(table, null, null, false);
-    HRegion region =
-        HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), TEST_UTIL
-            .getConfiguration(), htd);
+    HRegion region = TEST_UTIL.createLocalHRegion(info, htd);
 
     List<String> rows = generateRandomWords(10, "row");
     List<String> allColumns = generateRandomWords(100, "column");

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java Thu Aug 22 21:36:29 2013
@@ -17,11 +17,17 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
@@ -35,10 +41,16 @@ import org.apache.hadoop.hbase.util.Byte
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 
 @Category(SmallTests.class)
-public class TestKeepDeletes extends HBaseTestCase {
+public class TestKeepDeletes {
+  HBaseTestingUtility hbu = HBaseTestingUtility.createLocalHTU();
   private final byte[] T0 = Bytes.toBytes("0");
   private final byte[] T1 = Bytes.toBytes("1");
   private final byte[] T2 = Bytes.toBytes("2");
@@ -50,9 +62,10 @@ public class TestKeepDeletes extends HBa
   private final byte[] c0 = COLUMNS[0];
   private final byte[] c1 = COLUMNS[1];
 
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Rule public TestName name = new TestName();
+  
+  @Before
+  public void setUp() throws Exception {
     /* HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on
      * implicit RS timing.
      * Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete
@@ -66,9 +79,8 @@ public class TestKeepDeletes extends HBa
     EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
   }
 
-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
+  @After
+  public void tearDown() throws Exception {
     EnvironmentEdgeManager.reset();
   }
 
@@ -78,11 +90,12 @@ public class TestKeepDeletes extends HBa
    * Column Delete markers are versioned
    * Time range scan of deleted rows are possible
    */
+  @Test
   public void testBasicScenario() throws Exception {
     // keep 3 versions, rows do not expire
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -174,11 +187,12 @@ public class TestKeepDeletes extends HBa
    * if the store does not have KEEP_DELETED_CELLS enabled.
    * (can be changed easily)
    */
+  @Test
   public void testRawScanWithoutKeepingDeletes() throws Exception {
     // KEEP_DELETED_CELLS is NOT enabled
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -218,11 +232,12 @@ public class TestKeepDeletes extends HBa
   /**
    * basic verification of existing behavior
    */
+  @Test
   public void testWithoutKeepingDeletes() throws Exception {
     // KEEP_DELETED_CELLS is NOT enabled
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -262,10 +277,11 @@ public class TestKeepDeletes extends HBa
   /**
    * The ExplicitColumnTracker does not support "raw" scanning.
    */
+  @Test
   public void testRawScanWithColumns() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     Scan s = new Scan();
     s.setRaw(true);
@@ -273,7 +289,7 @@ public class TestKeepDeletes extends HBa
     s.addColumn(c0, c0);
 
     try {
-      InternalScanner scan = region.getScanner(s);
+      region.getScanner(s);
       fail("raw scanner with columns should have failed");
     } catch (org.apache.hadoop.hbase.DoNotRetryIOException dnre) {
       // ok!
@@ -285,10 +301,11 @@ public class TestKeepDeletes extends HBa
   /**
    * Verify that "raw" scanning mode return delete markers and deletes rows.
    */
+  @Test
   public void testRawScan() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -320,11 +337,11 @@ public class TestKeepDeletes extends HBa
     scan.next(kvs);
     assertEquals(8, kvs.size());
     assertTrue(kvs.get(0).isDeleteFamily());
-    assertEquals(kvs.get(1).getValue(), T3);
+    assertArrayEquals(kvs.get(1).getValue(), T3);
     assertTrue(kvs.get(2).isDelete());
     assertTrue(kvs.get(3).isDeleteType());
-    assertEquals(kvs.get(4).getValue(), T2);
-    assertEquals(kvs.get(5).getValue(), T1);
+    assertArrayEquals(kvs.get(4).getValue(), T2);
+    assertArrayEquals(kvs.get(5).getValue(), T1);
     // we have 3 CFs, so there are two more delete markers
     assertTrue(kvs.get(6).isDeleteFamily());
     assertTrue(kvs.get(7).isDeleteFamily());
@@ -350,7 +367,7 @@ public class TestKeepDeletes extends HBa
     scan.next(kvs);
     assertEquals(4, kvs.size());
     assertTrue(kvs.get(0).isDeleteFamily());
-    assertEquals(kvs.get(1).getValue(), T1);
+    assertArrayEquals(kvs.get(1).getValue(), T1);
     // we have 3 CFs
     assertTrue(kvs.get(2).isDeleteFamily());
     assertTrue(kvs.get(3).isDeleteFamily());
@@ -364,7 +381,7 @@ public class TestKeepDeletes extends HBa
     kvs = new ArrayList<KeyValue>();
     scan.next(kvs);
     assertEquals(2, kvs.size());
-    assertEquals(kvs.get(0).getValue(), T3);
+    assertArrayEquals(kvs.get(0).getValue(), T3);
     assertTrue(kvs.get(1).isDelete());
 
 
@@ -374,10 +391,11 @@ public class TestKeepDeletes extends HBa
   /**
    * Verify that delete markers are removed from an otherwise empty store.
    */
+  @Test
   public void testDeleteMarkerExpirationEmptyStore() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
 
@@ -416,10 +434,11 @@ public class TestKeepDeletes extends HBa
   /**
    * Test delete marker removal from store files.
    */
+  @Test
   public void testDeleteMarkerExpiration() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
 
@@ -478,10 +497,11 @@ public class TestKeepDeletes extends HBa
   /**
    * Verify correct range demarcation
    */
+  @Test
   public void testRanges() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -559,10 +579,11 @@ public class TestKeepDeletes extends HBa
    * with their respective puts and removed correctly by
    * versioning (i.e. not relying on the store earliestPutTS).
    */
+  @Test
   public void testDeleteMarkerVersioning() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
@@ -652,9 +673,9 @@ public class TestKeepDeletes extends HBa
    * Verify scenarios with multiple CFs and columns
    */
   public void testWithMixedCFs() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
         HConstants.FOREVER, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis();
 
@@ -702,9 +723,10 @@ public class TestKeepDeletes extends HBa
    * Test keeping deleted rows together with min versions set
    * @throws Exception
    */
+  @Test
   public void testWithMinVersions() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, true);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, true);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past
 
@@ -811,7 +833,7 @@ public class TestKeepDeletes extends HBa
     List<KeyValue> kvs = r.getColumn(fam, col);
     assertEquals(kvs.size(), vals.length);
     for (int i=0;i<vals.length;i++) {
-      assertEquals(kvs.get(i).getValue(), vals[i]);
+      assertArrayEquals(kvs.get(i).getValue(), vals[i]);
     }
   }
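
The swaps from assertEquals to assertArrayEquals in this file (and in TestMinVersions below) reflect that JUnit's assertEquals(Object, Object) relies on Object.equals, which byte[] does not override, whereas assertArrayEquals compares array contents. A standalone illustration (class name hypothetical):

import static org.junit.Assert.assertArrayEquals;

import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

public class ByteArrayAssertSketch {
  @Test
  public void contentsNotIdentity() {
    byte[] expected = Bytes.toBytes("3");
    byte[] actual = Bytes.toBytes("3");   // distinct object, same contents
    // Passes: element-by-element comparison of the two arrays.
    assertArrayEquals(expected, actual);
    // assertEquals(expected, actual) would compare references here, because byte[]
    // inherits equals() from Object.
  }
}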
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java Thu Aug 22 21:36:29 2013
@@ -33,16 +33,23 @@ import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.experimental.categories.Category;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import org.junit.experimental.categories.Category;
 
 /** memstore test case */
 @Category(MediumTests.class)
@@ -912,7 +919,7 @@ public class TestMemStore extends TestCa
     try {
       EnvironmentEdgeForMemstoreTest edge = new EnvironmentEdgeForMemstoreTest();
       EnvironmentEdgeManager.injectEdge(edge);
-      HBaseTestingUtility hbaseUtility = new HBaseTestingUtility(conf);
+      HBaseTestingUtility hbaseUtility = HBaseTestingUtility.createLocalHTU(conf);
       HRegion region = hbaseUtility.createTestRegion("foobar", new HColumnDescriptor("foo"));
 
       Map<byte[], Store> stores = region.getStores();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java Thu Aug 22 21:36:29 2013
@@ -18,10 +18,18 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -29,13 +37,17 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.filter.TimestampsFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 
 /**
  * Test Minimum Versions feature (HBASE-4071).
  */
 @Category(SmallTests.class)
-public class TestMinVersions extends HBaseTestCase {
+public class TestMinVersions {
+  HBaseTestingUtility hbu = HBaseTestingUtility.createLocalHTU();
   private final byte[] T0 = Bytes.toBytes("0");
   private final byte[] T1 = Bytes.toBytes("1");
   private final byte[] T2 = Bytes.toBytes("2");
@@ -45,12 +57,15 @@ public class TestMinVersions extends HBa
 
   private final byte[] c0 = COLUMNS[0];
 
+  @Rule public TestName name = new TestName();
+
   /**
    * Verify behavior of getClosestBefore(...)
    */
+  @Test
   public void testGetClosestBefore() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 1, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
     try {
 
       // 2s in the past
@@ -95,10 +110,11 @@ public class TestMinVersions extends HBa
    * Test mixed memstore and storefile scanning
    * with minimum versions.
    */
+  @Test
   public void testStoreMemStore() throws Exception {
     // keep 3 versions minimum
-    HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
     // 2s in the past
     long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
 
@@ -149,9 +165,10 @@ public class TestMinVersions extends HBa
   /**
    * Make sure the Deletes behave as expected with minimum versions
    */
+  @Test
   public void testDelete() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     // 2s in the past
     long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
@@ -206,9 +223,10 @@ public class TestMinVersions extends HBa
   /**
    * Make sure the memstor behaves correctly with minimum versions
    */
+  @Test
   public void testMemStore() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
 
     // 2s in the past
     long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
@@ -279,10 +297,11 @@ public class TestMinVersions extends HBa
   /**
    * Verify basic minimum versions functionality
    */
+  @Test
   public void testBaseCase() throws Exception {
     // 1 version minimum, 1000 versions maximum, ttl = 1s
-    HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
     try {
 
       // 2s in the past
@@ -370,9 +389,10 @@ public class TestMinVersions extends HBa
    * Verify that basic filters still behave correctly with
    * minimum versions enabled.
    */
+  @Test
   public void testFilters() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
-    HRegion region = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+    HRegion region = hbu.createLocalHRegion(htd, null, null);
     final byte [] c1 = COLUMNS[1];
 
     // 2s in the past
@@ -444,7 +464,7 @@ public class TestMinVersions extends HBa
     List<KeyValue> kvs = r.getColumn(col, col);
     assertEquals(kvs.size(), vals.length);
     for (int i=0;i<vals.length;i++) {
-      assertEquals(kvs.get(i).getValue(), vals[i]);
+      assertArrayEquals(kvs.get(i).getValue(), vals[i]);
     }
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java Thu Aug 22 21:36:29 2013
@@ -104,7 +104,7 @@ public class TestMultiColumnScanner {
   /** The probability to delete a row/column pair */
   private static final double DELETE_PROBABILITY = 0.02;
 
-  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   private final Compression.Algorithm comprAlgo;
   private final BloomType bloomType;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java Thu Aug 22 21:36:29 2013
@@ -76,8 +76,7 @@ public class TestScanWithBloomError {
   private FileSystem fs;
   private Configuration conf;
 
-  private final static HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility();
+  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   @Parameters
   public static final Collection<Object[]> parameters() {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java Thu Aug 22 21:36:29 2013
@@ -18,6 +18,15 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.apache.hadoop.hbase.HBaseTestingUtility.START_KEY_BYTES;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -25,6 +34,8 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestCase.HRegionIncommon;
+import org.apache.hadoop.hbase.HBaseTestCase.ScannerIncommon;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -44,14 +55,20 @@ import org.apache.hadoop.hbase.filter.In
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
 
 /**
  * Test of a long-lived scanner validating as we go.
  */
 @Category(SmallTests.class)
-public class TestScanner extends HBaseTestCase {
+public class TestScanner {
+  @Rule public TestName name = new TestName();
   private final Log LOG = LogFactory.getLog(this.getClass());
+  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   private static final byte [] FIRST_ROW = HConstants.EMPTY_START_ROW;
   private static final byte [][] COLS = { HConstants.CATALOG_FAMILY };
@@ -104,12 +121,13 @@ public class TestScanner extends HBaseTe
    * Test basic stop row filter works.
    * @throws Exception
    */
+  @Test
   public void testStopRow() throws Exception {
     byte [] startrow = Bytes.toBytes("bbb");
     byte [] stoprow = Bytes.toBytes("ccc");
     try {
-      this.r = createNewHRegion(TESTTABLEDESC, null, null);
-      addContent(this.r, HConstants.CATALOG_FAMILY);
+      this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+      HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
       List<KeyValue> results = new ArrayList<KeyValue>();
       // Do simple test of getting one row only first.
       Scan scan = new Scan(Bytes.toBytes("abc"), Bytes.toBytes("abd"));
@@ -178,10 +196,11 @@ public class TestScanner extends HBaseTe
     s.close();
   }
 
+  @Test
   public void testFilters() throws IOException {
     try {
-      this.r = createNewHRegion(TESTTABLEDESC, null, null);
-      addContent(this.r, HConstants.CATALOG_FAMILY);
+      this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+      HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
       byte [] prefix = Bytes.toBytes("ab");
       Filter newFilter = new PrefixFilter(prefix);
       Scan scan = new Scan();
@@ -204,10 +223,11 @@ public class TestScanner extends HBaseTe
   * NPEs but instead an UnknownScannerException. HBASE-2503
    * @throws Exception
    */
+  @Test
   public void testRaceBetweenClientAndTimeout() throws Exception {
     try {
-      this.r = createNewHRegion(TESTTABLEDESC, null, null);
-      addContent(this.r, HConstants.CATALOG_FAMILY);
+      this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+      HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
       Scan scan = new Scan();
       InternalScanner s = r.getScanner(scan);
       List<KeyValue> results = new ArrayList<KeyValue>();
@@ -228,9 +248,10 @@ public class TestScanner extends HBaseTe
   /** The test!
    * @throws IOException
    */
+  @Test
   public void testScanner() throws IOException {
     try {
-      r = createNewHRegion(TESTTABLEDESC, null, null);
+      r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
       region = new HRegionIncommon(r);
 
       // Write information to the meta table
@@ -250,7 +271,7 @@ public class TestScanner extends HBaseTe
       // Close and re-open
 
       r.close();
-      r = openClosedRegion(r);
+      r = HRegion.openHRegion(r, null);
       region = new HRegionIncommon(r);
 
       // Verify we can get the data back now that it is on disk.
@@ -288,7 +309,7 @@ public class TestScanner extends HBaseTe
       // Close and reopen
 
       r.close();
-      r = openClosedRegion(r);
+      r = HRegion.openHRegion(r, null);
       region = new HRegionIncommon(r);
 
       // Validate again
@@ -323,7 +344,7 @@ public class TestScanner extends HBaseTe
       // Close and reopen
 
       r.close();
-      r = openClosedRegion(r);
+      r = HRegion.openHRegion(r, null);
       region = new HRegionIncommon(r);
 
       // Validate again
@@ -440,11 +461,12 @@ public class TestScanner extends HBaseTe
    * HBase-910.
    * @throws Exception
    */
+  @Test
   public void testScanAndSyncFlush() throws Exception {
-    this.r = createNewHRegion(TESTTABLEDESC, null, null);
+    this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
     HRegionIncommon hri = new HRegionIncommon(r);
     try {
-        LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+        LOG.info("Added: " + HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
             Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
       int count = count(hri, -1, false);
       assertEquals(count, count(hri, 100, false)); // do a sync flush.
@@ -462,11 +484,12 @@ public class TestScanner extends HBaseTe
    *
    * @throws Exception
    */
+  @Test
   public void testScanAndRealConcurrentFlush() throws Exception {
-    this.r = createNewHRegion(TESTTABLEDESC, null, null);
+    this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
     HRegionIncommon hri = new HRegionIncommon(r);
     try {
-        LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+        LOG.info("Added: " + HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
             Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
       int count = count(hri, -1, false);
       assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
@@ -484,16 +507,17 @@ public class TestScanner extends HBaseTe
    *
    * @throws Exception
    */
+  @Test
   @SuppressWarnings("deprecation")
   public void testScanAndConcurrentMajorCompact() throws Exception {
-    HTableDescriptor htd = createTableDescriptor(getName());
-    this.r = createNewHRegion(htd, null, null);
+    HTableDescriptor htd = TEST_UTIL.createTableDescriptor(name.getMethodName());
+    this.r = TEST_UTIL.createLocalHRegion(htd, null, null);
     HRegionIncommon hri = new HRegionIncommon(r);
 
     try {
-      addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+      HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
           firstRowBytes, secondRowBytes);
-      addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+      HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
           firstRowBytes, secondRowBytes);
 
       Delete dc = new Delete(firstRowBytes);
@@ -502,9 +526,9 @@ public class TestScanner extends HBaseTe
       r.delete(dc);
       r.flushcache();
 
-      addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+      HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
           secondRowBytes, thirdRowBytes);
-      addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+      HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
           secondRowBytes, thirdRowBytes);
       r.flushcache();
 

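In TestScanner the inherited HBaseTestCase helpers are replaced by standalone calls: addContent() is invoked statically on HBaseTestCase, regions are created through the local utility, and the openClosedRegion() helper becomes a direct HRegion.openHRegion(region, null) call. A minimal sketch of that write/close/reopen sequence; the class name and the elided validation are illustrative, and htd is assumed to come from TEST_UTIL.createTableDescriptor() as in the hunks:

  import org.apache.hadoop.hbase.HBaseTestCase;
  import org.apache.hadoop.hbase.HBaseTestingUtility;
  import org.apache.hadoop.hbase.HConstants;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.regionserver.HRegion;

  public class ScannerReopenSketch {
    private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();

    void writeCloseAndReopen(HTableDescriptor htd) throws Exception {
      HRegion r = TEST_UTIL.createLocalHRegion(htd, null, null);
      try {
        // addContent is now called as a static helper rather than an inherited method.
        HBaseTestCase.addContent(r, HConstants.CATALOG_FAMILY);

        // Close and reopen: openClosedRegion(r) becomes openHRegion(r, null),
        // with the null second argument written exactly as in the hunks above.
        r.close();
        r = HRegion.openHRegion(r, null);

        // ... scan the reopened region and validate, as the original test does ...
      } finally {
        r.close();
      }
    }
  }
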
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java Thu Aug 22 21:36:29 2013
@@ -117,8 +117,7 @@ public class TestSeekOptimizations {
 
   private long totalSeekDiligent, totalSeekLazy;
   
-  private final static HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility();
+  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
 
   @Parameters
   public static final Collection<Object[]> parameters() {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java Thu Aug 22 21:36:29 2013
@@ -25,8 +25,6 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -34,11 +32,12 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.RegionTransition;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.executor.EventType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.MockRegionServerServices;
 import org.apache.hadoop.hbase.util.MockServer;
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
 import org.apache.zookeeper.KeeperException;
@@ -56,7 +55,7 @@ import org.mockito.Mockito;
 @Category(MediumTests.class)
 public class TestCloseRegionHandler {
   static final Log LOG = LogFactory.getLog(TestCloseRegionHandler.class);
-  private final static HBaseTestingUtility HTU = new HBaseTestingUtility();
+  private final static HBaseTestingUtility HTU = HBaseTestingUtility.createLocalHTU();
   private static final HTableDescriptor TEST_HTD =
     new HTableDescriptor(TableName.valueOf("TestCloseRegionHandler"));
   private HRegionInfo TEST_HRI;
@@ -93,14 +92,12 @@ public class TestCloseRegionHandler {
   @Test public void testFailedFlushAborts()
   throws IOException, NodeExistsException, KeeperException {
     final Server server = new MockServer(HTU, false);
-    final RegionServerServices rss = new MockRegionServerServices();
+    final RegionServerServices rss = HTU.createMockRegionServerService();
     HTableDescriptor htd = TEST_HTD;
     final HRegionInfo hri =
       new HRegionInfo(htd.getTableName(), HConstants.EMPTY_END_ROW,
         HConstants.EMPTY_END_ROW);
-    HRegion region =
-      HRegion.createHRegion(hri, HTU.getDataTestDir(),
-        HTU.getConfiguration(), htd);
+    HRegion region = HTU.createLocalHRegion(hri, htd);
     try {
       assertNotNull(region);
       // Spy on the region so can throw exception when close is called.
@@ -140,9 +137,8 @@ public class TestCloseRegionHandler {
      @Test public void testZKClosingNodeVersionMismatch()
      throws IOException, NodeExistsException, KeeperException, DeserializationException {
        final Server server = new MockServer(HTU);
-       final MockRegionServerServices rss = new MockRegionServerServices();
-       rss.setFileSystem(HTU.getTestFileSystem());
-   
+       final RegionServerServices rss = HTU.createMockRegionServerService();
+
        HTableDescriptor htd = TEST_HTD;
        final HRegionInfo hri = TEST_HRI;
    
@@ -178,8 +174,7 @@ public class TestCloseRegionHandler {
      @Test public void testCloseRegion()
      throws IOException, NodeExistsException, KeeperException, DeserializationException {
        final Server server = new MockServer(HTU);
-       final MockRegionServerServices rss = new MockRegionServerServices();
-       rss.setFileSystem(HTU.getTestFileSystem());
+       final RegionServerServices rss = HTU.createMockRegionServerService();
    
        HTableDescriptor htd = TEST_HTD;
        HRegionInfo hri = TEST_HRI;

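The TestCloseRegionHandler hunks above replace the hand-built MockRegionServerServices (plus its setFileSystem() call) and the explicit HRegion.createHRegion() against the data test dir with two utility calls. A minimal sketch, assuming only the HBaseTestingUtility methods the hunks already use; the class and table names are illustrative:

  import org.apache.hadoop.hbase.HBaseTestingUtility;
  import org.apache.hadoop.hbase.HConstants;
  import org.apache.hadoop.hbase.HRegionInfo;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.regionserver.HRegion;
  import org.apache.hadoop.hbase.regionserver.RegionServerServices;

  public class HandlerFixtureSketch {
    private final static HBaseTestingUtility HTU = HBaseTestingUtility.createLocalHTU();

    HRegion regionForHandlerTest() throws Exception {
      // One call replaces "new MockRegionServerServices(); rss.setFileSystem(...)";
      // this is what the close/open handlers under test would receive.
      RegionServerServices rss = HTU.createMockRegionServerService();

      HTableDescriptor htd =
          new HTableDescriptor(TableName.valueOf("HandlerFixtureSketch"));
      HRegionInfo hri = new HRegionInfo(htd.getTableName(),
          HConstants.EMPTY_END_ROW, HConstants.EMPTY_END_ROW);

      // One call replaces HRegion.createHRegion(hri, HTU.getDataTestDir(), conf, htd),
      // keeping the region under the utility's local test data dir.
      return HTU.createLocalHRegion(hri, htd);
    }
  }
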
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java Thu Aug 22 21:36:29 2013
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.executor.
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.MockRegionServerServices;
 import org.apache.hadoop.hbase.util.MockServer;
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@@ -48,7 +47,7 @@ import org.junit.experimental.categories
 @Category(MediumTests.class)
 public class TestOpenRegionHandler {
   static final Log LOG = LogFactory.getLog(TestOpenRegionHandler.class);
-  private final static HBaseTestingUtility HTU = new HBaseTestingUtility();
+  private final static HBaseTestingUtility HTU = HBaseTestingUtility.createLocalHTU();
   private static HTableDescriptor TEST_HTD;
   private HRegionInfo TEST_HRI;
 
@@ -88,7 +87,7 @@ public class TestOpenRegionHandler {
   @Test public void testYankingRegionFromUnderIt()
   throws IOException, NodeExistsException, KeeperException {
     final Server server = new MockServer(HTU);
-    final RegionServerServices rss = new MockRegionServerServices(HTU.getZooKeeperWatcher());
+    final RegionServerServices rss = HTU.createMockRegionServerService();
 
     HTableDescriptor htd = TEST_HTD;
     final HRegionInfo hri = TEST_HRI;
@@ -133,7 +132,7 @@ public class TestOpenRegionHandler {
   @Test
   public void testFailedOpenRegion() throws Exception {
     Server server = new MockServer(HTU);
-    RegionServerServices rsServices = new MockRegionServerServices();
+    RegionServerServices rsServices = HTU.createMockRegionServerService();
 
     // Create it OFFLINE, which is what it expects
     ZKAssign.createNodeOffline(server.getZooKeeper(), TEST_HRI, server.getServerName());
@@ -160,7 +159,7 @@ public class TestOpenRegionHandler {
   @Test
   public void testFailedUpdateMeta() throws Exception {
     Server server = new MockServer(HTU);
-    RegionServerServices rsServices = new MockRegionServerServices();
+    RegionServerServices rsServices = HTU.createMockRegionServerService();
 
     // Create it OFFLINE, which is what it expects
     ZKAssign.createNodeOffline(server.getZooKeeper(), TEST_HRI, server.getServerName());
@@ -187,7 +186,7 @@ public class TestOpenRegionHandler {
   @Test
   public void testTransitionToFailedOpenEvenIfCleanupFails() throws Exception {
     Server server = new MockServer(HTU);
-    RegionServerServices rsServices = new MockRegionServerServices();
+    RegionServerServices rsServices = HTU.createMockRegionServerService();
     // Create it OFFLINE, which is what it expects
     ZKAssign.createNodeOffline(server.getZooKeeper(), TEST_HRI, server.getServerName());
     // Create the handler
@@ -216,8 +215,7 @@ public class TestOpenRegionHandler {
   @Test
   public void testTransitionToFailedOpenFromOffline() throws Exception {
     Server server = new MockServer(HTU);
-    RegionServerServices rsServices = new MockRegionServerServices(server.getZooKeeper(),
-        server.getServerName());
+    RegionServerServices rsServices = HTU.createMockRegionServerService(server.getServerName());
     // Create it OFFLINE, which is what it expects
     ZKAssign.createNodeOffline(server.getZooKeeper(), TEST_HRI, server.getServerName());
     // Create the handler

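TestOpenRegionHandler makes the same substitution and additionally uses a form taking the server name; the TestTokenAuthentication hunk below uses a form taking the RPC server. The following sketch simply lines up the three call shapes as the hunks show them; the argument types (ServerName, RpcServerInterface) are inferred from the surrounding code and the variable names are placeholders:

  import org.apache.hadoop.hbase.HBaseTestingUtility;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.ipc.RpcServerInterface;
  import org.apache.hadoop.hbase.regionserver.RegionServerServices;

  public class MockServicesOverloadsSketch {
    private final static HBaseTestingUtility HTU = HBaseTestingUtility.createLocalHTU();

    void buildMocks(ServerName serverName, RpcServerInterface rpcServer) throws Exception {
      // No-argument form, used by most handler tests in this patch.
      RegionServerServices plain = HTU.createMockRegionServerService();

      // Form taking the server name, replacing
      // "new MockRegionServerServices(server.getZooKeeper(), server.getServerName())".
      RegionServerServices named = HTU.createMockRegionServerService(serverName);

      // Form taking the RPC server, replacing the anonymous MockRegionServerServices
      // that overrode getRpcServer() in TestTokenAuthentication.
      RegionServerServices withRpc = HTU.createMockRegionServerService(rpcServer);
    }
  }
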
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java?rev=1516622&r1=1516621&r2=1516622&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java Thu Aug 22 21:36:29 2013
@@ -29,28 +29,24 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ConcurrentMap;
 
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
+import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -62,7 +58,6 @@ import org.apache.hadoop.hbase.security.
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.MockRegionServerServices;
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.Threads;
@@ -81,6 +76,11 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import com.google.protobuf.BlockingRpcChannel;
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
 /**
  * Tests for authentication token creation and usage
  */
@@ -180,10 +180,7 @@ public class TestTokenAuthentication {
       this.rpcServer.start();
 
       // mock RegionServerServices to provide to coprocessor environment
-      final RegionServerServices mockServices = new MockRegionServerServices() {
-        @Override
-        public RpcServerInterface getRpcServer() { return rpcServer; }
-      };
+      final RegionServerServices mockServices = TEST_UTIL.createMockRegionServerService(rpcServer);
 
       // mock up coprocessor environment
       super.start(new RegionCoprocessorEnvironment() {