Posted to commits@hbase.apache.org by st...@apache.org on 2014/10/01 01:36:04 UTC

[1/2] HBASE-12038 Replace internal uses of signatures with byte[] and String tableNames to use the TableName equivalents (Solomon Duskis)

Repository: hbase
Updated Branches:
  refs/heads/master c4107d530 -> b5783795c


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index b272993..b1230bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -111,7 +111,7 @@ public class TestCellACLs extends SecureTestUtil {
     rsHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf);
 
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     // create a set of test users
     USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
@@ -133,7 +133,7 @@ public class TestCellACLs extends SecureTestUtil {
     htd.setOwner(USER_OWNER);
     htd.addFamily(hcd);
     admin.createTable(htd, new byte[][] { Bytes.toBytes("s") });
-    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName());
+    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
index 0b1fb64..f4b3882 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
@@ -99,7 +99,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
     rsHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf);
 
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     // create a set of test users
     USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
@@ -129,7 +129,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
 
     admin.createTable(htd);
 
-    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName());
+    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName());
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 7bdf55a..b795127 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -98,7 +98,7 @@ public class TestTablePermissions {
     UTIL.startMiniCluster();
 
     // Wait for the ACL table to become available
-    UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     ZKW = new ZooKeeperWatcher(UTIL.getConfiguration(),
       "TestTablePermissions", ABORTABLE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index 03510dc..d4f5d67 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -124,7 +124,7 @@ public class TestVisibilityLabelsWithACL {
       public Void run() throws Exception {
         Scan s = new Scan();
         s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
-        Table t = new HTable(conf, table.getTableName());
+        Table t = new HTable(conf, table.getName());
         try {
           ResultScanner scanner = t.getScanner(s);
           Result result = scanner.next();
@@ -153,7 +153,7 @@ public class TestVisibilityLabelsWithACL {
       public Void run() throws Exception {
         Scan s = new Scan();
         s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
-        Table t = new HTable(conf, table.getTableName());
+        Table t = new HTable(conf, table.getName());
         try {
           ResultScanner scanner = t.getScanner(s);
           Result[] result = scanner.next(5);
@@ -179,7 +179,7 @@ public class TestVisibilityLabelsWithACL {
       public Void run() throws Exception {
         Get g = new Get(row1);
         g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
-        Table t = new HTable(conf, table.getTableName());
+        Table t = new HTable(conf, table.getName());
         try {
           Result result = t.get(g);
           assertTrue(!result.isEmpty());
@@ -208,7 +208,7 @@ public class TestVisibilityLabelsWithACL {
       public Void run() throws Exception {
         Get g = new Get(row1);
         g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
-        Table t = new HTable(conf, table.getTableName());
+        Table t = new HTable(conf, table.getName());
         try {
           Result result = t.get(g);
           assertTrue(result.isEmpty());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
index c8f2222..cb5fff1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
@@ -145,7 +145,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
     // Scan the visibility label
     Scan s = new Scan();
     s.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
-    Table ht = new HTable(conf, LABELS_TABLE_NAME.getName());
+    Table ht = new HTable(conf, LABELS_TABLE_NAME);
     int i = 0;
     try {
       ResultScanner scanner = ht.getScanner(s);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
index 78ec873..19d5965 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
@@ -49,6 +49,6 @@ public class TestSecureExportSnapshot extends TestExportSnapshot {
     TEST_UTIL.startMiniMapReduceCluster();
 
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
index a63620a..e8d22b8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
@@ -62,7 +62,7 @@ public class TestProcessBasedCluster {
           HTestConst.DEFAULT_CF_STR_SET,
           HColumnDescriptor.DEFAULT_VERSIONS, COLS_PER_ROW, FLUSHES, NUM_REGIONS,
           ROWS_PER_FLUSH);
-      Table table = new HTable(TEST_UTIL.getConfiguration(), HTestConst.DEFAULT_TABLE_BYTES);
+      Table table = new HTable(TEST_UTIL.getConfiguration(), HTestConst.DEFAULT_TABLE);
       ResultScanner scanner = table.getScanner(HTestConst.DEFAULT_CF_BYTES);
       Result result;
       int rows = 0;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
index 451da3c..c55f946 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
@@ -280,8 +280,7 @@ public class TestRegionSplitter {
         final Configuration conf = UTIL.getConfiguration();
         conf.setInt("split.count", numRegions);
         SplitAlgorithm splitAlgo = RegionSplitter.newSplitAlgoInstance(conf, splitClass);
-        RegionSplitter.createPresplitTable(tableName.getNameAsString(), splitAlgo,
-                new String[] {CF_NAME}, conf);
+        RegionSplitter.createPresplitTable(tableName, splitAlgo, new String[] {CF_NAME}, conf);
         verifyBounds(expectedBounds, tableName);
     }
 
@@ -301,7 +300,7 @@ public class TestRegionSplitter {
         // Set this larger than the number of splits so RegionSplitter won't block
         conf.setInt("split.outstanding", 5);
         SplitAlgorithm splitAlgo = RegionSplitter.newSplitAlgoInstance(conf, splitClass);
-        RegionSplitter.rollingSplit(tableName.getNameAsString(), splitAlgo, conf);
+        RegionSplitter.rollingSplit(tableName, splitAlgo, conf);
         verifyBounds(expectedBounds, tableName);
     }
 
@@ -328,6 +327,7 @@ public class TestRegionSplitter {
                     startBoundaryIndex+1);
             assertEquals(0, Bytes.compareTo(regionEnd, expectedRegionEnd));
         }
+        hTable.close();
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
index 891ea57..7cc0061 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
@@ -42,14 +42,14 @@ import org.junit.runners.Suite;
 @Category({ClientTests.class, MediumTests.class})
 public class TestHTablePool {
   private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-  private final static byte[] TABLENAME = Bytes.toBytes("TestHTablePool");
+  private final static String TABLENAME = "TestHTablePool";
 
   public abstract static class TestHTablePoolType {
 
     @BeforeClass
     public static void setUpBeforeClass() throws Exception {
       TEST_UTIL.startMiniCluster(1);
-      TEST_UTIL.createTable(TABLENAME, HConstants.CATALOG_FAMILY);
+      TEST_UTIL.createTable(TableName.valueOf(TABLENAME), HConstants.CATALOG_FAMILY);
     }
 
     @AfterClass
@@ -63,7 +63,7 @@ public class TestHTablePool {
 		public void testTableWithStringName() throws Exception {
 			HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
 					Integer.MAX_VALUE, getPoolType());
-			String tableName = Bytes.toString(TABLENAME);
+			String tableName = TABLENAME;
 
 			// Request a table from an empty pool
 			Table table = pool.getTable(tableName);
@@ -132,7 +132,7 @@ public class TestHTablePool {
     public void testProxyImplementationReturned() {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
           Integer.MAX_VALUE);
-      String tableName = Bytes.toString(TABLENAME);// Request a table from
+      String tableName = TABLENAME;// Request a table from
                               // an
                               // empty pool
       Table table = pool.getTable(tableName);
@@ -145,7 +145,7 @@ public class TestHTablePool {
     public void testDeprecatedUsagePattern() throws IOException {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
           Integer.MAX_VALUE);
-      String tableName = Bytes.toString(TABLENAME);// Request a table from
+      String tableName = TABLENAME;// Request a table from
                               // an
                               // empty pool
 
@@ -167,14 +167,14 @@ public class TestHTablePool {
     public void testReturnDifferentTable() throws IOException {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
           Integer.MAX_VALUE);
-      String tableName = Bytes.toString(TABLENAME);// Request a table from
+      String tableName = TABLENAME;// Request a table from
                               // an
                               // empty pool
 
       // get table will return proxy implementation
       final Table table = pool.getTable(tableName);
       HTableInterface alienTable = new HTable(TEST_UTIL.getConfiguration(),
-          TABLENAME) {
+          TableName.valueOf(TABLENAME)) {
         // implementation doesn't matter as long the table is not from
         // pool
       };
@@ -191,7 +191,7 @@ public class TestHTablePool {
     public void testHTablePoolCloseTwice() throws Exception {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
           Integer.MAX_VALUE, getPoolType());
-      String tableName = Bytes.toString(TABLENAME);
+      String tableName = TABLENAME;
 
       // Request a table from an empty pool
       Table table = pool.getTable(tableName);
@@ -278,12 +278,12 @@ public class TestHTablePool {
 			}
 
 			Assert.assertEquals(4,
-					pool.getCurrentPoolSize(Bytes.toString(TABLENAME)));
+					pool.getCurrentPoolSize(TABLENAME));
 
 			pool.closeTablePool(TABLENAME);
 
 			Assert.assertEquals(0,
-					pool.getCurrentPoolSize(Bytes.toString(TABLENAME)));
+					pool.getCurrentPoolSize(TABLENAME));
 		}
 	}
 
@@ -354,12 +354,12 @@ public class TestHTablePool {
 			}
 
 			Assert.assertEquals(1,
-					pool.getCurrentPoolSize(Bytes.toString(TABLENAME)));
+					pool.getCurrentPoolSize(TABLENAME));
 
 			pool.closeTablePool(TABLENAME);
 
 			Assert.assertEquals(0,
-					pool.getCurrentPoolSize(Bytes.toString(TABLENAME)));
+					pool.getCurrentPoolSize(TABLENAME));
 		}
 	}
 


[2/2] git commit: HBASE-12038 Replace internal uses of signatures with byte[] and String tableNames to use the TableName equivalents (Solomon Duskis)

Posted by st...@apache.org.
HBASE-12038 Replace internal uses of signatures with byte[] and String tableNames to use the TableName equivalents (Solomon Duskis)
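
The pattern applied throughout this commit is the same at every call site: internal APIs that previously took a byte[] or String table name now take a TableName. A minimal before/after sketch of that migration (the table name "t1" below is illustrative only, not taken from the patch):

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.HTable;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.util.Bytes;

  public class TableNameMigrationSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = HBaseConfiguration.create();

      // Before: byte[]/String-based signatures, which forced callers to convert
      // back and forth, e.g. TableName.getName() or Bytes.toBytes(...).
      Table before = new HTable(conf, Bytes.toBytes("t1"));
      before.close();

      // After: pass the TableName directly; no conversion at the call site.
      Table after = new HTable(conf, TableName.valueOf("t1"));
      after.close();
    }
  }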


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5783795
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5783795
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5783795

Branch: refs/heads/master
Commit: b5783795cded0f5d52dd9c524d964ec6efc0046a
Parents: c4107d5
Author: stack <st...@apache.org>
Authored: Tue Sep 30 16:35:55 2014 -0700
Committer: stack <st...@apache.org>
Committed: Tue Sep 30 16:35:55 2014 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/client/HTableFactory.java      |  3 +-
 .../security/access/AccessControlClient.java    |  6 ++--
 .../security/visibility/VisibilityClient.java   |  6 ++--
 .../example/TestBulkDeleteProtocol.java         | 26 ++++++++---------
 .../example/TestRowCountEndpoint.java           |  4 +--
 .../hadoop/hbase/mapred/HRegionPartitioner.java |  3 +-
 .../hadoop/hbase/mapred/TableInputFormat.java   |  4 ++-
 .../hadoop/hbase/mapred/TableOutputFormat.java  |  3 +-
 .../mapreduce/MultiTableInputFormatBase.java    |  7 +++--
 .../hbase/mapreduce/MultiTableOutputFormat.java |  3 +-
 .../hbase/mapreduce/TableMapReduceUtil.java     | 30 ++++++++++++++++++++
 .../org/apache/hadoop/hbase/tool/Canary.java    |  4 +--
 .../hadoop/hbase/util/RegionSplitter.java       |  8 +++---
 .../hadoop/hbase/HBaseTestingUtility.java       | 16 +++++------
 .../apache/hadoop/hbase/TestAcidGuarantees.java |  2 +-
 .../apache/hadoop/hbase/TestMultiVersions.java  |  4 +--
 .../apache/hadoop/hbase/client/TestAdmin.java   | 20 ++++++-------
 .../hadoop/hbase/client/TestFromClientSide.java |  7 ++---
 .../org/apache/hadoop/hbase/client/TestHCM.java | 18 ++++++------
 .../hadoop/hbase/client/TestMultiParallel.java  |  2 +-
 .../hbase/coprocessor/TestClassLoading.java     |  2 +-
 .../hbase/filter/TestFilterWithScanLimits.java  |  4 +--
 .../hadoop/hbase/filter/TestFilterWrapper.java  |  4 +--
 .../hbase/mapreduce/TestHFileOutputFormat.java  |  6 ++--
 .../hbase/mapreduce/TestHFileOutputFormat2.java |  4 +--
 .../TestSecureLoadIncrementalHFiles.java        |  2 +-
 ...ecureLoadIncrementalHFilesSplitRecovery.java |  2 +-
 .../hbase/mapreduce/TestTableMapReduceBase.java |  2 +-
 .../hbase/mapreduce/TestTimeRangeMapRed.java    |  6 ++--
 .../TestMasterOperationsForRegionReplicas.java  |  8 +++---
 .../hbase/master/TestRegionPlacement.java       |  6 ++--
 .../hadoop/hbase/master/TestRestartCluster.java |  6 ++--
 .../hbase/master/TestTableLockManager.java      |  2 +-
 .../hadoop/hbase/quotas/TestQuotaAdmin.java     |  2 +-
 .../hadoop/hbase/quotas/TestQuotaTableUtil.java |  2 +-
 .../hadoop/hbase/quotas/TestQuotaThrottle.java  |  2 +-
 .../regionserver/TestEncryptionKeyRotation.java |  2 +-
 .../hbase/regionserver/TestFSErrorsExposed.java |  4 +--
 .../hbase/regionserver/TestJoinedScanners.java  |  4 +--
 .../replication/TestMasterReplication.java      |  6 ++--
 .../replication/TestMultiSlaveReplication.java  |  4 +--
 .../replication/TestPerTableCFReplication.java  | 10 +++----
 .../hbase/replication/TestReplicationBase.java  |  4 +--
 .../replication/TestReplicationSmallTests.java  |  2 +-
 .../replication/TestReplicationSyncUpTool.java  | 12 ++++----
 .../replication/TestReplicationWithTags.java    |  4 +--
 .../hbase/security/access/SecureTestUtil.java   |  6 ++--
 .../security/access/TestAccessController.java   |  4 +--
 .../security/access/TestAccessController2.java  |  4 +--
 .../access/TestCellACLWithMultipleVersions.java |  4 +--
 .../hbase/security/access/TestCellACLs.java     |  4 +--
 .../access/TestScanEarlyTermination.java        |  4 +--
 .../security/access/TestTablePermissions.java   |  2 +-
 .../visibility/TestVisibilityLabelsWithACL.java |  8 +++---
 ...ibilityLabelsWithDefaultVisLabelService.java |  2 +-
 .../snapshot/TestSecureExportSnapshot.java      |  2 +-
 .../hbase/util/TestProcessBasedCluster.java     |  2 +-
 .../hadoop/hbase/util/TestRegionSplitter.java   |  6 ++--
 .../hadoop/hbase/thrift2/TestHTablePool.java    | 24 ++++++++--------
 59 files changed, 198 insertions(+), 162 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
index c38c386..d053e66 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -37,7 +38,7 @@ public class HTableFactory implements HTableInterfaceFactory {
   public HTableInterface createHTableInterface(Configuration config,
       byte[] tableName) {
     try {
-      return new HTable(config, tableName);
+      return new HTable(config, TableName.valueOf(tableName));
     } catch (IOException ioe) {
       throw new RuntimeException(ioe);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index a2afe55..6a743da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -76,7 +76,7 @@ public class AccessControlClient {
     try {
       TableName aclTableName =
           TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
-      ht = new HTable(conf, aclTableName.getName());
+      ht = new HTable(conf, aclTableName);
       Batch.Call<AccessControlService, GrantResponse> callable =
           new Batch.Call<AccessControlService, GrantResponse>() {
         ServerRpcController controller = new ServerRpcController();
@@ -156,7 +156,7 @@ public class AccessControlClient {
     try {
       TableName aclTableName = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR,
           "acl");
-      ht = new HTable(conf, aclTableName.getName());
+      ht = new HTable(conf, aclTableName);
       Batch.Call<AccessControlService, AccessControlProtos.RevokeResponse> callable =
           new Batch.Call<AccessControlService, AccessControlProtos.RevokeResponse>() {
         ServerRpcController controller = new ServerRpcController();
@@ -219,7 +219,7 @@ public class AccessControlClient {
       TableName aclTableName =
           TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
       ha = new HBaseAdmin(conf);
-      ht = new HTable(conf, aclTableName.getName());
+      ht = new HTable(conf, aclTableName);
       CoprocessorRpcChannel service = ht.coprocessorService(HConstants.EMPTY_START_ROW);
       BlockingInterface protocol =
           AccessControlProtos.AccessControlService.newBlockingStub(service);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index a56ddc3..6ac1d3f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -75,7 +75,7 @@ public class VisibilityClient {
       throws Throwable {
     Table ht = null;
     try {
-      ht = new HTable(conf, LABELS_TABLE_NAME.getName());
+      ht = new HTable(conf, LABELS_TABLE_NAME);
       Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = 
           new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {
         ServerRpcController controller = new ServerRpcController();
@@ -129,7 +129,7 @@ public class VisibilityClient {
   public static GetAuthsResponse getAuths(Configuration conf, final String user) throws Throwable {
     Table ht = null;
     try {
-      ht = new HTable(conf, LABELS_TABLE_NAME.getName());
+      ht = new HTable(conf, LABELS_TABLE_NAME);
       Batch.Call<VisibilityLabelsService, GetAuthsResponse> callable = 
           new Batch.Call<VisibilityLabelsService, GetAuthsResponse>() {
         ServerRpcController controller = new ServerRpcController();
@@ -171,7 +171,7 @@ public class VisibilityClient {
       final String user, final boolean setOrClear) throws IOException, ServiceException, Throwable {
     Table ht = null;
     try {
-      ht = new HTable(conf, LABELS_TABLE_NAME.getName());
+      ht = new HTable(conf, LABELS_TABLE_NAME);
       Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = 
           new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {
         ServerRpcController controller = new ServerRpcController();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
index 391360f..87e655e 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
@@ -79,7 +79,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteEndpoint() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteEndpoint");
+    TableName tableName = TableName.valueOf("testBulkDeleteEndpoint");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -102,8 +102,8 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteEndpointWhenRowBatchSizeLessThanRowsToDeleteFromARegion()
       throws Throwable {
-    byte[] tableName = Bytes
-        .toBytes("testBulkDeleteEndpointWhenRowBatchSizeLessThanRowsToDeleteFromARegion");
+    TableName tableName = TableName
+        .valueOf("testBulkDeleteEndpointWhenRowBatchSizeLessThanRowsToDeleteFromARegion");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -123,7 +123,7 @@ public class TestBulkDeleteProtocol {
     ht.close();
   }
 
-  private long invokeBulkDeleteProtocol(byte[] tableName, final Scan scan, final int rowBatchSize,
+  private long invokeBulkDeleteProtocol(TableName tableName, final Scan scan, final int rowBatchSize,
       final DeleteType deleteType, final Long timeStamp) throws Throwable {
     Table ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
     long noOfDeletedRows = 0L;
@@ -156,7 +156,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteWithConditionBasedDelete() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteWithConditionBasedDelete");
+    TableName tableName = TableName.valueOf("testBulkDeleteWithConditionBasedDelete");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -186,7 +186,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteColumn() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteColumn");
+    TableName tableName = TableName.valueOf("testBulkDeleteColumn");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -215,8 +215,8 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteFamily() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteFamily");
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
+    TableName tableName = TableName.valueOf("testBulkDeleteFamily");
+    HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(FAMILY1));
     htd.addFamily(new HColumnDescriptor(FAMILY2));
     TEST_UTIL.getHBaseAdmin().createTable(htd, Bytes.toBytes(0), Bytes.toBytes(120), 5);
@@ -246,7 +246,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteColumnVersion() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersion");
+    TableName tableName = TableName.valueOf("testBulkDeleteColumnVersion");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -294,7 +294,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteColumnVersionBasedOnTS() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersionBasedOnTS");
+    TableName tableName = TableName.valueOf("testBulkDeleteColumnVersionBasedOnTS");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -341,7 +341,7 @@ public class TestBulkDeleteProtocol {
 
   // @Ignore @Test
   public void testBulkDeleteWithNumberOfVersions() throws Throwable {
-    byte[] tableName = Bytes.toBytes("testBulkDeleteWithNumberOfVersions");
+    TableName tableName = TableName.valueOf("testBulkDeleteWithNumberOfVersions");
     Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
@@ -424,8 +424,8 @@ public class TestBulkDeleteProtocol {
     ht.close();
   }
 
-  private Table createTable(byte[] tableName) throws IOException {
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
+  private Table createTable(TableName tableName) throws IOException {
+    HTableDescriptor htd = new HTableDescriptor(tableName);
     HColumnDescriptor hcd = new HColumnDescriptor(FAMILY1);
     hcd.setMaxVersions(10);// Just setting 10 as I am not testing with more than 10 versions here
     htd.addFamily(hcd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
index b28c247..ddc5847 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
@@ -46,7 +46,7 @@ import static junit.framework.Assert.*;
  */
 @Category({CoprocessorTests.class, MediumTests.class})
 public class TestRowCountEndpoint {
-  private static final byte[] TEST_TABLE = Bytes.toBytes("testrowcounter");
+  private static final TableName TEST_TABLE = TableName.valueOf("testrowcounter");
   private static final byte[] TEST_FAMILY = Bytes.toBytes("f");
   private static final byte[] TEST_COLUMN = Bytes.toBytes("col");
 
@@ -61,7 +61,7 @@ public class TestRowCountEndpoint {
         RowCountEndpoint.class.getName());
 
     TEST_UTIL.startMiniCluster();
-    TEST_UTIL.createTable(TableName.valueOf(TEST_TABLE), new byte[][]{TEST_FAMILY});
+    TEST_UTIL.createTable(TEST_TABLE, new byte[][]{TEST_FAMILY});
   }
 
   // @Ignore @AfterClass

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
index 60a2c99..11acea0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
@@ -25,6 +25,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -52,7 +53,7 @@ implements Partitioner<ImmutableBytesWritable, V2> {
   public void configure(JobConf job) {
     try {
       this.table = new HTable(HBaseConfiguration.create(job),
-        job.get(TableOutputFormat.OUTPUT_TABLE));
+        TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE)));
     } catch (IOException e) {
       LOG.error(e);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index 6c96f86..e1220fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -57,7 +58,8 @@ public class TableInputFormat extends TableInputFormatBase implements
     }
     setInputColumns(m_cols);
     try {
-      setHTable(new HTable(HBaseConfiguration.create(job), tableNames[0].getName()));
+      setHTable(
+          new HTable(HBaseConfiguration.create(job), TableName.valueOf(tableNames[0].getName())));
     } catch (Exception e) {
       LOG.error(StringUtils.stringifyException(e));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
index 0f7cded..5a5f544 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -85,7 +86,7 @@ FileOutputFormat<ImmutableBytesWritable, Put> {
 
     // expecting exactly one path
 
-    String tableName = job.get(OUTPUT_TABLE);
+    TableName tableName = TableName.valueOf(job.get(OUTPUT_TABLE));
     HTable table = null;
     try {
       table = new HTable(HBaseConfiguration.create(job), tableName);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 761c46f..20169e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
@@ -80,13 +81,13 @@ public abstract class MultiTableInputFormatBase extends
     TableSplit tSplit = (TableSplit) split;
     LOG.info(MessageFormat.format("Input split length: {0} bytes.", tSplit.getLength()));
 
-    if (tSplit.getTableName() == null) {
+    if (tSplit.getTable() == null) {
       throw new IOException("Cannot create a record reader because of a"
           + " previous error. Please look at the previous logs lines from"
           + " the task's full log for more details.");
     }
     Table table =
-        new HTable(context.getConfiguration(), tSplit.getTableName());
+        new HTable(context.getConfiguration(), tSplit.getTable());
 
     TableRecordReader trr = this.tableRecordReader;
 
@@ -133,7 +134,7 @@ public abstract class MultiTableInputFormatBase extends
 
       HTable table = null;
       try {
-        table = new HTable(context.getConfiguration(), tableName);
+        table = new HTable(context.getConfiguration(), TableName.valueOf(tableName));
         Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
         if (keys == null || keys.getFirst() == null ||
             keys.getFirst().length == 0) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
index 5902a8e..c1d8373 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Mutation;
@@ -102,7 +103,7 @@ public class MultiTableOutputFormat extends OutputFormat<ImmutableBytesWritable,
     HTable getTable(ImmutableBytesWritable tableName) throws IOException {
       if (!tables.containsKey(tableName)) {
         LOG.debug("Opening HTable \"" + Bytes.toString(tableName.get())+ "\" for writing");
-        HTable table = new HTable(conf, tableName.get());
+        HTable table = new HTable(conf, TableName.valueOf(tableName.get()));
         table.setAutoFlush(false, true);
         tables.put(tableName, table);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 568384c..268f2b1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -102,6 +103,35 @@ public class TableMapReduceUtil {
         job, true);
   }
 
+
+  /**
+   * Use this before submitting a TableMap job. It will appropriately set up
+   * the job.
+   *
+   * @param table  The table name to read from.
+   * @param scan  The scan instance with the columns, time range etc.
+   * @param mapper  The mapper class to use.
+   * @param outputKeyClass  The class of the output key.
+   * @param outputValueClass  The class of the output value.
+   * @param job  The current job to adjust.  Make sure the passed job is
+   * carrying all necessary HBase configuration.
+   * @throws IOException When setting up the details fails.
+   */
+  public static void initTableMapperJob(TableName table,
+      Scan scan,
+      Class<? extends TableMapper> mapper,
+      Class<?> outputKeyClass,
+      Class<?> outputValueClass,
+      Job job) throws IOException {
+    initTableMapperJob(table.getNameAsString(),
+        scan,
+        mapper,
+        outputKeyClass,
+        outputValueClass,
+        job,
+        true);
+  }
+
   /**
    * Use this before submitting a TableMap job. It will appropriately set up
    * the job.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index d5cdb39..e6e975f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -480,7 +480,7 @@ public final class Canary implements Tool {
     Table table = null;
 
     try {
-      table = new HTable(admin.getConfiguration(), tableDesc.getName());
+      table = new HTable(admin.getConfiguration(), tableDesc.getTableName());
     } catch (TableNotFoundException e) {
       return;
     }
@@ -679,7 +679,7 @@ public final class Canary implements Tool {
         HTableDescriptor[] tableDescs = this.admin.listTables();
         List<HRegionInfo> regions = null;
         for (HTableDescriptor tableDesc : tableDescs) {
-          table = new HTable(this.admin.getConfiguration(), tableDesc.getName());
+          table = new HTable(this.admin.getConfiguration(), tableDesc.getTableName());
 
           for (Map.Entry<HRegionInfo, ServerName> entry : table
               .getRegionLocations().entrySet()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index 9055e51..483da8f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -339,7 +339,7 @@ public class RegionSplitter {
 		  "UniformSplit treats keys as arbitrary bytes.", opt);
       return;
     }
-    String tableName = cmd.getArgs()[0];
+    TableName tableName = TableName.valueOf(cmd.getArgs()[0]);
     String splitClass = cmd.getArgs()[1];
     SplitAlgorithm splitAlgo = newSplitAlgoInstance(conf, splitClass);
 
@@ -363,7 +363,7 @@ public class RegionSplitter {
     }
   }
 
-  static void createPresplitTable(String tableName, SplitAlgorithm splitAlgo,
+  static void createPresplitTable(TableName tableName, SplitAlgorithm splitAlgo,
           String[] columnFamilies, Configuration conf) throws IOException,
           InterruptedException {
     final int splitCount = conf.getInt("split.count", 0);
@@ -374,7 +374,7 @@ public class RegionSplitter {
     LOG.debug("Creating table " + tableName + " with " + columnFamilies.length
         + " column families.  Presplitting to " + splitCount + " regions");
 
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+    HTableDescriptor desc = new HTableDescriptor(tableName);
     for (String cf : columnFamilies) {
       desc.addFamily(new HColumnDescriptor(Bytes.toBytes(cf)));
     }
@@ -399,7 +399,7 @@ public class RegionSplitter {
     LOG.debug("Finished creating table with " + splitCount + " regions");
   }
 
-  static void rollingSplit(String tableName, SplitAlgorithm splitAlgo,
+  static void rollingSplit(TableName tableName, SplitAlgorithm splitAlgo,
           Configuration conf) throws IOException, InterruptedException {
     final int minOS = conf.getInt("split.outstanding", 2);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index de530aa..296ef4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1229,7 +1229,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
       desc.addFamily(hcd);
     }
     getHBaseAdmin().createTable(desc);
-    return new HTable(c, tableName);
+    return new HTable(c, desc.getTableName());
   }
 
   /**
@@ -1275,7 +1275,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
       desc.addFamily(hcd);
     }
     getHBaseAdmin().createTable(desc);
-    return new HTable(c, tableName);
+    return new HTable(c, desc.getTableName());
   }
 
   /**
@@ -1465,8 +1465,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     }
     getHBaseAdmin().createTable(desc, splitRows);
     // HBaseAdmin only waits for regions to appear in hbase:meta we should wait until they are assigned
-    waitUntilAllRegionsAssigned(TableName.valueOf(tableName));
-    return new HTable(getConfiguration(), tableName);
+    waitUntilAllRegionsAssigned(desc.getTableName());
+    return new HTable(getConfiguration(), desc.getTableName());
   }
 
   /**
@@ -2713,9 +2713,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @throws InterruptedException
    * @throws IOException
    */
-  public void waitTableAvailable(byte[] table)
+  public void waitTableAvailable(TableName table)
       throws InterruptedException, IOException {
-    waitTableAvailable(getHBaseAdmin(), table, 30000);
+    waitTableAvailable(getHBaseAdmin(), table.getName(), 30000);
   }
 
   public void waitTableAvailable(Admin admin, byte[] table)
@@ -2755,9 +2755,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @throws InterruptedException
    * @throws IOException
    */
-  public void waitTableEnabled(byte[] table)
+  public void waitTableEnabled(TableName table)
       throws InterruptedException, IOException {
-    waitTableEnabled(getHBaseAdmin(), table, 30000);
+    waitTableEnabled(getHBaseAdmin(), table.getName(), 30000);
   }
 
   public void waitTableEnabled(Admin admin, byte[] table)

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
index 0e62d77..3bcb0bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
@@ -58,7 +58,7 @@ import com.google.common.collect.Lists;
 @Category({FlakeyTests.class, MediumTests.class})
 public class TestAcidGuarantees implements Tool {
   protected static final Log LOG = LogFactory.getLog(TestAcidGuarantees.class);
-  public static final byte [] TABLE_NAME = Bytes.toBytes("TestAcidGuarantees");
+  public static final TableName TABLE_NAME = TableName.valueOf("TestAcidGuarantees");
   public static final byte [] FAMILY_A = Bytes.toBytes("A");
   public static final byte [] FAMILY_B = Bytes.toBytes("B");
   public static final byte [] FAMILY_C = Bytes.toBytes("C");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 6c70384..2a9b953 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -197,8 +197,8 @@ public class TestMultiVersions {
    */
   @Test
   public void testScanMultipleVersions() throws Exception {
-    final byte [] tableName = Bytes.toBytes("testScanMultipleVersions");
-    final HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+    final TableName tableName = TableName.valueOf("testScanMultipleVersions");
+    final HTableDescriptor desc = new HTableDescriptor(tableName);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     final byte [][] rows = new byte[][] {
       Bytes.toBytes("row_0200"),

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
index 7f93780..08dab86 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
@@ -615,8 +615,8 @@ public class TestAdmin {
 
   @Test (timeout=300000)
   public void testCreateTableNumberOfRegions() throws IOException, InterruptedException {
-    byte[] tableName = Bytes.toBytes("testCreateTableNumberOfRegions");
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+    TableName tableName = TableName.valueOf("testCreateTableNumberOfRegions");
+    HTableDescriptor desc = new HTableDescriptor(tableName);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc);
     HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
@@ -624,8 +624,8 @@ public class TestAdmin {
     assertEquals("Table should have only 1 region", 1, regions.size());
     ht.close();
 
-    byte [] TABLE_2 = Bytes.add(tableName, Bytes.toBytes("_2"));
-    desc = new HTableDescriptor(TableName.valueOf(TABLE_2));
+    TableName TABLE_2 = TableName.valueOf(tableName.getNameAsString() + "_2");
+    desc = new HTableDescriptor(TABLE_2);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc, new byte[][]{new byte[]{42}});
     HTable ht2 = new HTable(TEST_UTIL.getConfiguration(), TABLE_2);
@@ -633,8 +633,8 @@ public class TestAdmin {
     assertEquals("Table should have only 2 region", 2, regions.size());
     ht2.close();
 
-    byte [] TABLE_3 = Bytes.add(tableName, Bytes.toBytes("_3"));
-    desc = new HTableDescriptor(TableName.valueOf(TABLE_3));
+    TableName TABLE_3 = TableName.valueOf(tableName.getNameAsString() + "_3");
+    desc = new HTableDescriptor(TABLE_3);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc, "a".getBytes(), "z".getBytes(), 3);
     HTable ht3 = new HTable(TEST_UTIL.getConfiguration(), TABLE_3);
@@ -642,8 +642,8 @@ public class TestAdmin {
     assertEquals("Table should have only 3 region", 3, regions.size());
     ht3.close();
 
-    byte [] TABLE_4 = Bytes.add(tableName, Bytes.toBytes("_4"));
-    desc = new HTableDescriptor(TableName.valueOf(TABLE_4));
+    TableName TABLE_4 = TableName.valueOf(tableName.getNameAsString() + "_4");
+    desc = new HTableDescriptor(TABLE_4);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     try {
       admin.createTable(desc, "a".getBytes(), "z".getBytes(), 2);
@@ -652,8 +652,8 @@ public class TestAdmin {
     // Expected
     }
 
-    byte [] TABLE_5 = Bytes.add(tableName, Bytes.toBytes("_5"));
-    desc = new HTableDescriptor(TableName.valueOf(TABLE_5));
+    TableName TABLE_5 = TableName.valueOf(tableName.getNameAsString() + "_5");
+    desc = new HTableDescriptor(TABLE_5);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc, new byte[] {1}, new byte[] {127}, 16);
     HTable ht5 = new HTable(TEST_UTIL.getConfiguration(), TABLE_5);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index aa44647..bfece26 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -172,7 +172,7 @@ public class TestFromClientSide {
    */
    @Test
    public void testKeepDeletedCells() throws Exception {
-     final byte[] TABLENAME = Bytes.toBytes("testKeepDeletesCells");
+     final TableName TABLENAME = TableName.valueOf("testKeepDeletesCells");
      final byte[] FAMILY = Bytes.toBytes("family");
      final byte[] C0 = Bytes.toBytes("c0");
 
@@ -400,7 +400,7 @@ public class TestFromClientSide {
    */
   @Test
   public void testWeirdCacheBehaviour() throws Exception {
-    byte [] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+    TableName TABLE = TableName.valueOf("testWeirdCacheBehaviour");
     byte [][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
         Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
         Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
@@ -4209,8 +4209,7 @@ public class TestFromClientSide {
 
     // Test that attribute changes were applied
     desc = a.getTableDescriptor();
-    assertTrue("wrong table descriptor returned",
-      desc.getTableName().equals(tableAname));
+    assertEquals("wrong table descriptor returned", desc.getTableName(), tableAname);
     // check HTD attribute
     value = desc.getValue(attrName);
     assertFalse("missing HTD attribute value", value == null);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index ffe1e19..3e29725 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -236,7 +236,7 @@ public class TestHCM {
     final ServerName sn = rs.getRegionServer().getServerName();
 
     HTable t = TEST_UTIL.createTable(tn, cf);
-    TEST_UTIL.waitTableAvailable(tn.getName());
+    TEST_UTIL.waitTableAvailable(tn);
 
     while(TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
         getRegionStates().isRegionsInTransition()){
@@ -333,8 +333,8 @@ public class TestHCM {
 
 
   private void testConnectionClose(boolean allowsInterrupt) throws Exception {
-    String tableName = "HCM-testConnectionClose" + allowsInterrupt;
-    TEST_UTIL.createTable(tableName.getBytes(), FAM_NAM).close();
+    TableName tableName = TableName.valueOf("HCM-testConnectionClose" + allowsInterrupt);
+    TEST_UTIL.createTable(tableName, FAM_NAM).close();
 
     boolean previousBalance = TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
 
@@ -346,7 +346,7 @@ public class TestHCM {
     c2.setInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, 0); // Server do not really expire
     c2.setBoolean(RpcClient.SPECIFIC_WRITE_THREAD, allowsInterrupt);
 
-    final HTable table = new HTable(c2, tableName.getBytes());
+    final HTable table = new HTable(c2, tableName);
 
     Put put = new Put(ROW);
     put.add(FAM_NAM, ROW, ROW);
@@ -415,8 +415,8 @@ public class TestHCM {
    */
   @Test
   public void testConnectionIdle() throws Exception {
-    String tableName = "HCM-testConnectionIdle";
-    TEST_UTIL.createTable(tableName.getBytes(), FAM_NAM).close();
+    TableName tableName = TableName.valueOf("HCM-testConnectionIdle");
+    TEST_UTIL.createTable(tableName, FAM_NAM).close();
     int idleTime =  20000;
     boolean previousBalance = TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
 
@@ -426,7 +426,7 @@ public class TestHCM {
     c2.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); // Don't retry: retry = test failed
     c2.setInt(RpcClient.IDLE_TIME, idleTime);
 
-    final Table table = new HTable(c2, tableName.getBytes());
+    final Table table = new HTable(c2, tableName);
 
     Put put = new Put(ROW);
     put.add(FAM_NAM, ROW, ROW);
@@ -478,9 +478,9 @@ public class TestHCM {
       return;
     }
 
-    String tableName = "HCM-testConnectionCut";
+    TableName tableName = TableName.valueOf("HCM-testConnectionCut");
 
-    TEST_UTIL.createTable(tableName.getBytes(), FAM_NAM).close();
+    TEST_UTIL.createTable(tableName, FAM_NAM).close();
     boolean previousBalance = TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
 
     Configuration c2 = new Configuration(TEST_UTIL.getConfiguration());
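
The TestHCM changes drop the String/.getBytes() round-trip: HBaseTestingUtility.createTable and the HTable constructor both accept a TableName here, and TableName.valueOf validates the name up front, so an illegal name should fail at construction rather than on a later RPC. A hedged sketch of the resulting setup (FAM_NAM and TEST_UTIL are assumed as in the test):

    TableName tableName = TableName.valueOf("HCM-testConnectionIdle");
    TEST_UTIL.createTable(tableName, FAM_NAM).close();

    Configuration c2 = new Configuration(TEST_UTIL.getConfiguration());
    Table table = new HTable(c2, tableName);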

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index e4bf256..b911f6e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -82,7 +82,7 @@ public class TestMultiParallel {
     UTIL.startMiniCluster(slaves);
     HTable t = UTIL.createTable(TEST_TABLE, Bytes.toBytes(FAMILY));
     UTIL.createMultiRegions(t, Bytes.toBytes(FAMILY));
-    UTIL.waitTableEnabled(TEST_TABLE.getName());
+    UTIL.waitTableEnabled(TEST_TABLE);
     t.close();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index accd764..2fe121d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -533,7 +533,7 @@ public class TestClassLoading {
 
   private void waitForTable(TableName name) throws InterruptedException, IOException {
     // First wait until all regions are online
-    TEST_UTIL.waitTableEnabled(name.getName());
+    TEST_UTIL.waitTableEnabled(name);
     // Now wait a bit longer for the coprocessor hosts to load the CPs
     Thread.sleep(1000);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
index 504877d..fac66c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
@@ -71,7 +71,7 @@ public class TestFilterWithScanLimits {
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static Configuration conf = null;
   private static HBaseAdmin admin = null;
-  private static byte[] name = Bytes.toBytes("test");
+  private static TableName name = TableName.valueOf("test");
 
   @Test
   public void testScanWithLimit() {
@@ -141,7 +141,7 @@ public class TestFilterWithScanLimits {
     assertNotNull("HBaseAdmin is not initialized successfully.", admin);
     if (admin != null) {
 
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name));
+      HTableDescriptor desc = new HTableDescriptor(name);
       HColumnDescriptor coldef = new HColumnDescriptor(Bytes.toBytes("f1"));
       desc.addFamily(coldef);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
index 615fabb..1cffe1d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
@@ -65,7 +65,7 @@ public class TestFilterWrapper {
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static Configuration conf = null;
   private static HBaseAdmin admin = null;
-  private static byte[] name = Bytes.toBytes("test");
+  private static TableName name = TableName.valueOf("test");
 
   @Test
   public void testFilterWrapper() {
@@ -144,7 +144,7 @@ public class TestFilterWrapper {
     assertNotNull("HBaseAdmin is not initialized successfully.", admin);
     if (admin != null) {
 
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name));
+      HTableDescriptor desc = new HTableDescriptor(name);
       HColumnDescriptor coldef = new HColumnDescriptor(Bytes.toBytes("f1"));
       desc.addFamily(coldef);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index 8f6e167..9bdebe6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -449,7 +449,7 @@ public class TestHFileOutputFormat  {
         LOG.info("Waiting for table to disable");
       }
       admin.enableTable(TABLE_NAME);
-      util.waitTableAvailable(TABLE_NAME.getName());
+      util.waitTableAvailable(TABLE_NAME);
       assertEquals("Data should remain after reopening of regions",
           tableDigestBefore, util.checksumRows(table));
     } finally {
@@ -1049,7 +1049,7 @@ public class TestHFileOutputFormat  {
     Configuration conf = HBaseConfiguration.create();
     util = new HBaseTestingUtility(conf);
     if ("newtable".equals(args[0])) {
-      byte[] tname = args[1].getBytes();
+      TableName tname = TableName.valueOf(args[1]);
       HTable table = util.createTable(tname, FAMILIES);
       HBaseAdmin admin = new HBaseAdmin(conf);
       admin.disableTable(tname);
@@ -1057,7 +1057,7 @@ public class TestHFileOutputFormat  {
       util.createMultiRegions(conf, table, FAMILIES[0], startKeys);
       admin.enableTable(tname);
     } else if ("incremental".equals(args[0])) {
-      byte[] tname = args[1].getBytes();
+      TableName tname = TableName.valueOf(args[1]);
       HTable table = new HTable(conf, tname);
       Path outDir = new Path("incremental-out");
       runIncrementalPELoad(conf, table, outDir);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index c14b9a6..6ab675f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -448,7 +448,7 @@ public class TestHFileOutputFormat2  {
         LOG.info("Waiting for table to disable");
       }
       admin.enableTable(TABLE_NAME);
-      util.waitTableAvailable(TABLE_NAME.getName());
+      util.waitTableAvailable(TABLE_NAME);
       assertEquals("Data should remain after reopening of regions",
           tableDigestBefore, util.checksumRows(table));
     } finally {
@@ -1057,7 +1057,7 @@ public class TestHFileOutputFormat2  {
       util.createMultiRegions(conf, table, FAMILIES[0], startKeys);
       admin.enableTable(tname);
     } else if ("incremental".equals(args[0])) {
-      byte[] tname = args[1].getBytes();
+      TableName tname = TableName.valueOf(args[1]);
       HTable table = new HTable(conf, tname);
       Path outDir = new Path("incremental-out");
       runIncrementalPELoad(conf, table, outDir);
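
Both HFileOutputFormat test drivers handle their command-line table argument the same way after this change: the string is converted once with TableName.valueOf and then passed to the admin and table APIs. Roughly, as a sketch rather than the full main method (conf, util and FAMILIES are assumed as in the tests):

    TableName tname = TableName.valueOf(args[1]);
    HTable table = new HTable(conf, tname);
    HBaseAdmin admin = new HBaseAdmin(conf);
    admin.disableTable(tname);
    admin.enableTable(tname);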

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
index fcd8216..e8aca29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
@@ -56,7 +56,7 @@ public class TestSecureLoadIncrementalHFiles extends  TestLoadIncrementalHFiles{
     util.startMiniCluster();
 
     // Wait for the ACL table to become available
-    util.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    util.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     setupNamespace();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
index 76a6d57..fe9c132 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
@@ -58,7 +58,7 @@ public class TestSecureLoadIncrementalHFilesSplitRecovery extends TestLoadIncrem
     util.startMiniCluster();
 
     // Wait for the ACL table to become available
-    util.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    util.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
   }
 
   //Disabling this test as it does not work in secure mode

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index c218bc9..c82d82f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -54,7 +54,7 @@ import org.junit.Test;
 public abstract class TestTableMapReduceBase {
 
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
-  protected static final byte[] MULTI_REGION_TABLE_NAME = Bytes.toBytes("mrtest");
+  protected static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest");
   protected static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
   protected static final byte[] OUTPUT_FAMILY = Bytes.toBytes("text");
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 58d1b6e..b701c35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -78,7 +78,7 @@ public class TestTimeRangeMapRed {
   static final long MINSTAMP = 1245620005;
   static final long MAXSTAMP = 1245620100 + 1; // maxStamp itself is excluded. so increment it.
 
-  static final byte[] TABLE_NAME = Bytes.toBytes("table123");
+  static final TableName TABLE_NAME = TableName.valueOf("table123");
   static final byte[] FAMILY_NAME = Bytes.toBytes("text");
   static final byte[] COLUMN_NAME = Bytes.toBytes("input");
 
@@ -146,7 +146,7 @@ public class TestTimeRangeMapRed {
   @Test
   public void testTimeRangeMapRed()
   throws IOException, InterruptedException, ClassNotFoundException {
-    final HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
+    final HTableDescriptor desc = new HTableDescriptor(TABLE_NAME);
     final HColumnDescriptor col = new HColumnDescriptor(FAMILY_NAME);
     col.setMaxVersions(Integer.MAX_VALUE);
     desc.addFamily(col);
@@ -179,7 +179,7 @@ public class TestTimeRangeMapRed {
       scan.addColumn(FAMILY_NAME, COLUMN_NAME);
       scan.setTimeRange(MINSTAMP, MAXSTAMP);
       scan.setMaxVersions();
-      TableMapReduceUtil.initTableMapperJob(Bytes.toString(TABLE_NAME),
+      TableMapReduceUtil.initTableMapperJob(TABLE_NAME,
         scan, ProcessTimeRangeMapper.class, Text.class, Text.class, job);
       job.waitForCompletion(true);
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 8d38bdd..4e6fe59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -113,7 +113,7 @@ public class TestMasterOperationsForRegionReplicas {
       desc.setRegionReplication(numReplica);
       desc.addFamily(new HColumnDescriptor("family"));
       admin.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), numRegions);
-      TEST_UTIL.waitTableEnabled(table.getName());
+      TEST_UTIL.waitTableEnabled(table);
       validateNumberOfRowsInMeta(table, numRegions, admin.getConnection());
 
       List<HRegionInfo> hris = MetaTableAccessor.getTableRegions(admin.getConnection(), table);
@@ -169,7 +169,7 @@ public class TestMasterOperationsForRegionReplicas {
       TEST_UTIL.getConfiguration().setBoolean("hbase.master.startup.retainassign", true);
       TEST_UTIL.shutdownMiniHBaseCluster();
       TEST_UTIL.startMiniHBaseCluster(1, numSlaves);
-      TEST_UTIL.waitTableEnabled(table.getName());
+      TEST_UTIL.waitTableEnabled(table);
       admin.close();
       admin = new HBaseAdmin(conf); 
       validateFromSnapshotFromMeta(TEST_UTIL, table, numRegions, numReplica,
@@ -179,7 +179,7 @@ public class TestMasterOperationsForRegionReplicas {
       // one server running
       TEST_UTIL.shutdownMiniHBaseCluster();
       TEST_UTIL.startMiniHBaseCluster(1, 1);
-      TEST_UTIL.waitTableEnabled(table.getName());
+      TEST_UTIL.waitTableEnabled(table);
       admin.close();
       admin = new HBaseAdmin(conf);
       validateSingleRegionServerAssignment(admin.getConnection(), numRegions, numReplica);
@@ -242,7 +242,7 @@ public class TestMasterOperationsForRegionReplicas {
       desc.setRegionReplication(numReplica);
       desc.addFamily(new HColumnDescriptor("family"));
       admin.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), numRegions);
-      TEST_UTIL.waitTableEnabled(table.getName());
+      TEST_UTIL.waitTableEnabled(table);
       Set<byte[]> tableRows = new HashSet<byte[]>();
       List<HRegionInfo> hris = MetaTableAccessor.getTableRegions(admin.getConnection(), table);
       for (HRegionInfo hri : hris) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index c607459..c34fb48 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -212,7 +212,7 @@ public class TestRegionPlacement {
   @Test
   public void testRegionPlacement() throws Exception {
     String tableStr = "testRegionAssignment";
-    byte[] table = Bytes.toBytes(tableStr);
+    TableName table = TableName.valueOf(tableStr);
     // Create a table with REGION_NUM regions.
     createTable(table, REGION_NUM);
 
@@ -667,7 +667,7 @@ public class TestRegionPlacement {
    * @return
    * @throws IOException
    */
-  private static void createTable(byte[] tableName, int regionNum)
+  private static void createTable(TableName tableName, int regionNum)
       throws IOException {
     int expectedRegions = regionNum;
     byte[][] splitKeys = new byte[expectedRegions - 1][];
@@ -676,7 +676,7 @@ public class TestRegionPlacement {
       splitKeys[i - 1] = new byte[] { splitKey, splitKey, splitKey };
     }
 
-    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+    HTableDescriptor desc = new HTableDescriptor(tableName);
     desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc, splitKeys);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
index 0b1ccb2..74afe7e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
@@ -72,7 +72,7 @@ public class TestRestartCluster {
       UTIL.createTable(TABLE, FAMILY);
     }
     for(TableName TABLE : TABLES) {
-      UTIL.waitTableEnabled(TABLE.getName());
+      UTIL.waitTableEnabled(TABLE);
     }
 
     List<HRegionInfo> allRegions =
@@ -101,7 +101,7 @@ public class TestRestartCluster {
       } catch(TableExistsException tee) {
         LOG.info("Table already exists as expected");
       }
-      UTIL.waitTableAvailable(TABLE.getName());
+      UTIL.waitTableAvailable(TABLE);
     }
   }
 
@@ -122,7 +122,7 @@ public class TestRestartCluster {
       UTIL.createTable(TABLE, FAMILY);
     }
     for(TableName TABLE : TABLES) {
-      UTIL.waitTableEnabled(TABLE.getName());
+      UTIL.waitTableEnabled(TABLE);
     }
 
     HMaster master = UTIL.getMiniHBaseCluster().getMaster();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index 5d24368..54f0691 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -394,7 +394,7 @@ public class TestTableLockManager {
 
     alterThread.start();
     splitThread.start();
-    TEST_UTIL.waitTableEnabled(tableName.toBytes());
+    TEST_UTIL.waitTableEnabled(tableName);
     while (true) {
       List<HRegionInfo> regions = admin.getTableRegions(tableName);
       LOG.info(String.format("Table #regions: %d regions: %s:", regions.size(), regions));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
index 622cac2..18dd5ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
@@ -67,7 +67,7 @@ public class TestQuotaAdmin {
     TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
     TEST_UTIL.getConfiguration().setBoolean("hbase.master.enabletable.roundrobin", true);
     TEST_UTIL.startMiniCluster(1);
-    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME.getName());
+    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME);
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
index d4185a1..84d9155 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
@@ -58,7 +58,7 @@ public class TestQuotaTableUtil {
     TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
     TEST_UTIL.getConfiguration().setBoolean("hbase.master.enabletable.roundrobin", true);
     TEST_UTIL.startMiniCluster(1);
-    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME.getName());
+    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME);
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index 8dbb76e..0901d2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -75,7 +75,7 @@ public class TestQuotaThrottle {
     TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
     TEST_UTIL.getConfiguration().setBoolean("hbase.master.enabletable.roundrobin", true);
     TEST_UTIL.startMiniCluster(1);
-    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME.getName());
+    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME);
     QuotaCache.TEST_FORCE_REFRESH = true;
 
     tables = new HTable[TABLE_NAMES.length];

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index 4d04dc6..941f6d2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -208,7 +208,7 @@ public class TestEncryptionKeyRotation {
     TEST_UTIL.getHBaseAdmin().createTable(htd);
     TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
     // Create a store file
-    Table table = new HTable(conf, htd.getName());
+    Table table = new HTable(conf, htd.getTableName());
     try {
       table.put(new Put(Bytes.toBytes("testrow"))
         .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
index f01b089..9a2cc82 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
@@ -187,11 +187,11 @@ public class TestFSErrorsExposed {
       util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
 
       util.startMiniCluster(1);
-      byte[] tableName = Bytes.toBytes("table");
+      TableName tableName = TableName.valueOf("table");
       byte[] fam = Bytes.toBytes("fam");
 
       Admin admin = new HBaseAdmin(util.getConfiguration());
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+      HTableDescriptor desc = new HTableDescriptor(tableName);
       desc.addFamily(new HColumnDescriptor(fam)
           .setMaxVersions(1)
           .setBlockCacheEnabled(false)

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
index d56aaa0..b8e6382 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
@@ -93,8 +93,8 @@ public class TestJoinedScanners {
       cluster = htu.startMiniCluster(1, regionServersCount, dataNodeHosts);
       byte [][] families = {cf_essential, cf_joined};
 
-      byte[] tableName = Bytes.toBytes(this.getClass().getSimpleName());
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+      TableName tableName = TableName.valueOf(this.getClass().getSimpleName());
+      HTableDescriptor desc = new HTableDescriptor(tableName);
       for(byte[] family : families) {
         HColumnDescriptor hcd = new HColumnDescriptor(family);
         hcd.setDataBlockEncoding(blockEncoding);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
index 1718fd1..a501af9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
@@ -75,7 +75,7 @@ public class TestMasterReplication {
   private static final long SLEEP_TIME = 500;
   private static final int NB_RETRIES = 10;
 
-  private static final byte[] tableName = Bytes.toBytes("test");
+  private static final TableName tableName = TableName.valueOf("test");
   private static final byte[] famName = Bytes.toBytes("f");
   private static final byte[] row = Bytes.toBytes("row");
   private static final byte[] row1 = Bytes.toBytes("row1");
@@ -108,7 +108,7 @@ public class TestMasterReplication {
         CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY,
         CoprocessorCounter.class.getName());
 
-    table = new HTableDescriptor(TableName.valueOf(tableName));
+    table = new HTableDescriptor(tableName);
     HColumnDescriptor fam = new HColumnDescriptor(famName);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
     table.addFamily(fam);
@@ -346,7 +346,7 @@ public class TestMasterReplication {
   }
 
   @SuppressWarnings("resource")
-  private Table[] getHTablesOnClusters(byte[] tableName) throws Exception {
+  private Table[] getHTablesOnClusters(TableName tableName) throws Exception {
     int numClusters = utilities.length;
     Table[] htables = new Table[numClusters];
     for (int i = 0; i < numClusters; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
index 8e4d700..6b241b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
@@ -62,7 +62,7 @@ public class TestMultiSlaveReplication {
   private static final long SLEEP_TIME = 500;
   private static final int NB_RETRIES = 100;
 
-  private static final byte[] tableName = Bytes.toBytes("test");
+  private static final TableName tableName = TableName.valueOf("test");
   private static final byte[] famName = Bytes.toBytes("f");
   private static final byte[] row = Bytes.toBytes("row");
   private static final byte[] row1 = Bytes.toBytes("row1");
@@ -108,7 +108,7 @@ public class TestMultiSlaveReplication {
     utility3.setZkCluster(miniZK);
     new ZooKeeperWatcher(conf3, "cluster3", null, true);
 
-    table = new HTableDescriptor(TableName.valueOf(tableName));
+    table = new HTableDescriptor(tableName);
     HColumnDescriptor fam = new HColumnDescriptor(famName);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
     table.addFamily(fam);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index 7778baa..ab5f136 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -67,10 +67,10 @@ public class TestPerTableCFReplication {
   private static final long SLEEP_TIME = 500;
   private static final int NB_RETRIES = 100;
 
-  private static final byte[] tableName = Bytes.toBytes("test");
-  private static final byte[] tabAName = Bytes.toBytes("TA");
-  private static final byte[] tabBName = Bytes.toBytes("TB");
-  private static final byte[] tabCName = Bytes.toBytes("TC");
+  private static final TableName tableName = TableName.valueOf("test");
+  private static final TableName tabAName = TableName.valueOf("TA");
+  private static final TableName tabBName = TableName.valueOf("TB");
+  private static final TableName tabCName = TableName.valueOf("TC");
   private static final byte[] famName = Bytes.toBytes("f");
   private static final byte[] f1Name = Bytes.toBytes("f1");
   private static final byte[] f2Name = Bytes.toBytes("f2");
@@ -121,7 +121,7 @@ public class TestPerTableCFReplication {
     utility3.setZkCluster(miniZK);
     new ZooKeeperWatcher(conf3, "cluster3", null, true);
 
-    table = new HTableDescriptor(TableName.valueOf(tableName));
+    table = new HTableDescriptor(tableName);
     HColumnDescriptor fam = new HColumnDescriptor(famName);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
     table.addFamily(fam);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
index 252d3a4..9fe2e0b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
@@ -75,7 +75,7 @@ public class TestReplicationBase {
   protected static final long SLEEP_TIME = 500;
   protected static final int NB_RETRIES = 10;
 
-  protected static final byte[] tableName = Bytes.toBytes("test");
+  protected static final TableName tableName = TableName.valueOf("test");
   protected static final byte[] famName = Bytes.toBytes("f");
   protected static final byte[] row = Bytes.toBytes("row");
   protected static final byte[] noRepfamName = Bytes.toBytes("norep");
@@ -129,7 +129,7 @@ public class TestReplicationBase {
     utility1.startMiniCluster(2);
     utility2.startMiniCluster(2);
 
-    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));
+    HTableDescriptor table = new HTableDescriptor(tableName);
     HColumnDescriptor fam = new HColumnDescriptor(famName);
     fam.setMaxVersions(3);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index cd363e7..d698255 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -446,7 +446,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
     // identical since it does the check
     testSmallBatch();
 
-    String[] args = new String[] {"2", Bytes.toString(tableName)};
+    String[] args = new String[] {"2", tableName.getNameAsString()};
     Job job = VerifyReplication.createSubmittableJob(CONF_WITH_LOCALFS, args);
     if (job == null) {
       fail("Job wasn't created, see the log");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
index 701c974..58eb19f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
@@ -45,8 +45,8 @@ public class TestReplicationSyncUpTool extends TestReplicationBase {
 
   private static final Log LOG = LogFactory.getLog(TestReplicationSyncUpTool.class);
 
-  private static final byte[] t1_su = Bytes.toBytes("t1_syncup");
-  private static final byte[] t2_su = Bytes.toBytes("t2_syncup");
+  private static final TableName t1_su = TableName.valueOf("t1_syncup");
+  private static final TableName t2_su = TableName.valueOf("t2_syncup");
 
   private static final byte[] famName = Bytes.toBytes("cf1");
   private static final byte[] qualName = Bytes.toBytes("q1");
@@ -63,27 +63,27 @@ public class TestReplicationSyncUpTool extends TestReplicationBase {
 
     HColumnDescriptor fam;
 
-    t1_syncupSource = new HTableDescriptor(TableName.valueOf(t1_su));
+    t1_syncupSource = new HTableDescriptor(t1_su);
     fam = new HColumnDescriptor(famName);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
     t1_syncupSource.addFamily(fam);
     fam = new HColumnDescriptor(noRepfamName);
     t1_syncupSource.addFamily(fam);
 
-    t1_syncupTarget = new HTableDescriptor(TableName.valueOf(t1_su));
+    t1_syncupTarget = new HTableDescriptor(t1_su);
     fam = new HColumnDescriptor(famName);
     t1_syncupTarget.addFamily(fam);
     fam = new HColumnDescriptor(noRepfamName);
     t1_syncupTarget.addFamily(fam);
 
-    t2_syncupSource = new HTableDescriptor(TableName.valueOf(t2_su));
+    t2_syncupSource = new HTableDescriptor(t2_su);
     fam = new HColumnDescriptor(famName);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
     t2_syncupSource.addFamily(fam);
     fam = new HColumnDescriptor(noRepfamName);
     t2_syncupSource.addFamily(fam);
 
-    t2_syncupTarget = new HTableDescriptor(TableName.valueOf(t2_su));
+    t2_syncupTarget = new HTableDescriptor(t2_su);
     fam = new HColumnDescriptor(famName);
     t2_syncupTarget.addFamily(fam);
     fam = new HColumnDescriptor(noRepfamName);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 39067a1..43ee682 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -81,7 +81,7 @@ public class TestReplicationWithTags {
   private static final long SLEEP_TIME = 500;
   private static final int NB_RETRIES = 10;
 
-  private static final byte[] TABLE_NAME = Bytes.toBytes("TestReplicationWithTags");
+  private static final TableName TABLE_NAME = TableName.valueOf("TestReplicationWithTags");
   private static final byte[] FAMILY = Bytes.toBytes("f");
   private static final byte[] ROW = Bytes.toBytes("row");
 
@@ -132,7 +132,7 @@ public class TestReplicationWithTags {
     utility1.startMiniCluster(2);
     utility2.startMiniCluster(2);
 
-    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
+    HTableDescriptor table = new HTableDescriptor(TABLE_NAME);
     HColumnDescriptor fam = new HColumnDescriptor(FAMILY);
     fam.setMaxVersions(3);
     fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index 3e834bd..1141c9f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -103,17 +103,17 @@ public class SecureTestUtil {
     }
   }
 
-  public static void checkTablePerms(Configuration conf, byte[] table, byte[] family, byte[] column,
+  public static void checkTablePerms(Configuration conf, TableName table, byte[] family, byte[] column,
       Permission.Action... actions) throws IOException {
     Permission[] perms = new Permission[actions.length];
     for (int i = 0; i < actions.length; i++) {
-      perms[i] = new TablePermission(TableName.valueOf(table), family, column, actions[i]);
+      perms[i] = new TablePermission(table, family, column, actions[i]);
     }
 
     checkTablePerms(conf, table, perms);
   }
 
-  public static void checkTablePerms(Configuration conf, byte[] table, Permission... perms) throws IOException {
+  public static void checkTablePerms(Configuration conf, TableName table, Permission... perms) throws IOException {
     CheckPermissionsRequest.Builder request = CheckPermissionsRequest.newBuilder();
     for (Permission p : perms) {
       request.addPermission(ProtobufUtil.toPermission(p));
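
SecureTestUtil is one of the places where a helper signature itself changes: checkTablePerms now declares TableName, so callers pass it straight through and the helper builds TablePermission from the argument without a conversion. A caller-side sketch under those signatures (the table, family and qualifier values are illustrative; conf is assumed to be the test configuration):

    TableName table = TableName.valueOf("testtable1");
    byte[] family = Bytes.toBytes("f1");
    SecureTestUtil.checkTablePerms(conf, table, family, null,
        Permission.Action.READ, Permission.Action.WRITE);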

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index f94e6a3..3ed58cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -192,7 +192,7 @@ public class TestAccessController extends SecureTestUtil {
       Coprocessor.PRIORITY_HIGHEST, 1, conf);
 
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     // create a set of test users
     SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" });
@@ -219,7 +219,7 @@ public class TestAccessController extends SecureTestUtil {
     htd.addFamily(hcd);
     htd.setOwner(USER_OWNER);
     admin.createTable(htd, new byte[][] { Bytes.toBytes("s") });
-    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName());
+    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName());
 
     HRegion region = TEST_UTIL.getHBaseCluster().getRegions(TEST_TABLE.getTableName()).get(0);
     RegionCoprocessorHost rcpHost = region.getCoprocessorHost();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
index 62d35bf..a305e10 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
@@ -57,7 +57,7 @@ public class TestAccessController2 extends SecureTestUtil {
     verifyConfiguration(conf);
     TEST_UTIL.startMiniCluster();
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
   }
 
   @AfterClass
@@ -86,7 +86,7 @@ public class TestAccessController2 extends SecureTestUtil {
         return null;
       }
     }, testUser);
-    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName());
+    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName());
     // Verify that owner permissions have been granted to the test user on the
     // table just created
     List<TablePermission> perms = AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName())

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5783795/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index 0459b9f..9484698 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -109,7 +109,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
     rsHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf);
 
     // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName());
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
 
     // create a set of test users
     USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
@@ -136,7 +136,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
     htd.setOwner(USER_OWNER);
     htd.addFamily(hcd);
     admin.createTable(htd, new byte[][] { Bytes.toBytes("s") });
-    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName());
+    TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName());
   }
 
   @Test
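
Taken together, the hunks above reduce to a handful of TableName conversions; where an API still expects the raw forms, TableName exposes them directly. A short reference sketch of the conversions used throughout (not from the patch):

    TableName tn = TableName.valueOf("test");                  // from a String
    TableName tn2 = TableName.valueOf(Bytes.toBytes("test"));  // from a byte[]
    byte[] raw = tn.getName();                                  // back to byte[] where still needed
    String str = tn.getNameAsString();                          // back to String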