Posted to commits@phoenix.apache.org by ss...@apache.org on 2017/11/15 06:12:59 UTC

[3/3] phoenix git commit: PHOENIX-4321 Replace deprecated HBaseAdmin with Admin

PHOENIX-4321 Replace deprecated HBaseAdmin with Admin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/693fa659
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/693fa659
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/693fa659

Branch: refs/heads/5.x-HBase-2.0
Commit: 693fa6598df41c2cbd8111e465cf98d1b3ba1ec4
Parents: c85e065
Author: Sergey Soldatov <ss...@apache.org>
Authored: Thu Nov 9 13:29:50 2017 -0800
Committer: Sergey Soldatov <ss...@apache.org>
Committed: Tue Nov 14 22:12:35 2017 -0800

----------------------------------------------------------------------
 ...ReplayWithIndexWritesAndCompressedWALIT.java | 13 +--
 .../StatisticsCollectionRunTrackerIT.java       | 11 +--
 .../phoenix/end2end/AggregateQueryIT.java       |  8 +-
 .../apache/phoenix/end2end/AlterTableIT.java    | 11 ++-
 .../end2end/ColumnProjectionOptimizationIT.java |  9 +-
 .../apache/phoenix/end2end/CreateSchemaIT.java  |  4 +-
 .../apache/phoenix/end2end/CreateTableIT.java   | 45 ++++-----
 .../phoenix/end2end/DisableLocalIndexIT.java    |  7 +-
 .../apache/phoenix/end2end/DropSchemaIT.java    |  4 +-
 .../apache/phoenix/end2end/DynamicColumnIT.java |  4 +-
 .../phoenix/end2end/FlappingAlterTableIT.java   | 12 +--
 .../phoenix/end2end/FlappingLocalIndexIT.java   |  8 +-
 .../phoenix/end2end/LocalIndexSplitMergeIT.java |  6 +-
 .../phoenix/end2end/MappingTableDataTypeIT.java |  6 +-
 .../end2end/NamespaceSchemaMappingIT.java       |  8 +-
 .../phoenix/end2end/NativeHBaseTypesIT.java     |  5 +-
 .../phoenix/end2end/ProductMetricsIT.java       |  7 +-
 .../end2end/QueryDatabaseMetaDataIT.java        | 21 +++--
 .../apache/phoenix/end2end/ReverseScanIT.java   |  8 --
 .../apache/phoenix/end2end/SetPropertyIT.java   | 96 +++++++++----------
 .../end2end/SkipScanAfterManualSplitIT.java     | 11 ++-
 .../apache/phoenix/end2end/SkipScanQueryIT.java |  7 +-
 .../end2end/TableSnapshotReadsMapReduceIT.java  |  8 +-
 .../end2end/TenantSpecificTablesDDLIT.java      |  4 +-
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 20 ++--
 .../org/apache/phoenix/end2end/UseSchemaIT.java |  3 +-
 .../java/org/apache/phoenix/end2end/ViewIT.java |  6 +-
 .../phoenix/end2end/index/BaseIndexIT.java      |  4 +-
 .../phoenix/end2end/index/DropMetadataIT.java   |  4 +-
 .../phoenix/end2end/index/LocalIndexIT.java     | 19 ++--
 .../phoenix/end2end/index/MutableIndexIT.java   | 10 +-
 .../index/MutableIndexReplicationIT.java        |  9 +-
 .../end2end/index/MutableIndexSplitIT.java      | 10 +-
 .../end2end/index/PartialIndexRebuilderIT.java  |  9 +-
 .../end2end/index/txn/MutableRollbackIT.java    |  6 +-
 .../UpsertSelectOverlappingBatchesIT.java       |  6 +-
 .../FailForUnsupportedHBaseVersionsIT.java      |  4 +-
 .../iterate/RoundRobinResultIteratorIT.java     | 11 ++-
 .../apache/phoenix/rpc/PhoenixServerRpcIT.java  | 13 +--
 .../phoenix/tx/ParameterizedTransactionIT.java  | 10 +-
 .../hbase/index/write/RecoveryIndexWriter.java  | 10 +-
 .../mapreduce/index/IndexScrutinyTool.java      |  4 +-
 .../phoenix/mapreduce/index/IndexTool.java      |  4 +-
 .../phoenix/query/ConnectionQueryServices.java  |  4 +-
 .../query/ConnectionQueryServicesImpl.java      | 98 ++++++++++----------
 .../query/ConnectionlessQueryServicesImpl.java  |  4 +-
 .../query/DelegateConnectionQueryServices.java  |  4 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  6 +-
 .../org/apache/phoenix/util/UpgradeUtil.java    | 57 ++++++------
 .../phoenix/hbase/index/IndexTableName.java     | 45 +++++++++
 .../apache/phoenix/hbase/index/TableName.java   | 45 ---------
 .../index/parallel/TestThreadPoolBuilder.java   |  6 +-
 .../index/parallel/TestThreadPoolManager.java   |  7 +-
 .../index/util/TestIndexManagementUtil.java     |  8 +-
 .../hbase/index/write/TestIndexWriter.java      |  4 +-
 .../index/write/TestParalleIndexWriter.java     |  4 +-
 .../write/TestParalleWriterIndexCommitter.java  |  4 +-
 .../index/write/TestWALRecoveryCaching.java     |  8 +-
 .../java/org/apache/phoenix/query/BaseTest.java |  6 +-
 .../query/ConnectionQueryServicesImplTest.java  |  6 +-
 .../java/org/apache/phoenix/util/TestUtil.java  | 25 ++---
 .../org/apache/phoenix/flume/PhoenixSinkIT.java |  7 +-
 .../apache/phoenix/hive/util/PhoenixUtil.java   |  4 +-
 63 files changed, 422 insertions(+), 415 deletions(-)
----------------------------------------------------------------------
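
Throughout this patch the deprecated HBaseAdmin class is replaced by the Admin interface, and String/byte[] table names are replaced by org.apache.hadoop.hbase.TableName. In the tests below the Admin is obtained from Phoenix's ConnectionQueryServices.getAdmin(); a standalone HBase client would get one from a Connection instead. A minimal sketch of that general pattern (illustrative only, not taken from this commit; the class name is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class AdminMigrationSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            // Old style (deprecated, removed from these tests): new HBaseAdmin(conf)
            // New style: obtain the Admin interface from a Connection and close both.
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Admin admin = connection.getAdmin()) {
                // Admin methods are keyed by TableName rather than String/byte[] names.
                for (TableName table : admin.listTableNames()) {
                    System.out.println(table.getNameAsString());
                }
            }
        }
    }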


http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 9566e48..67b7df3 100644
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -38,9 +38,10 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -55,8 +56,8 @@ import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexTableName;
 import org.apache.phoenix.hbase.index.IndexTestingUtils;
-import org.apache.phoenix.hbase.index.TableName;
 import org.apache.phoenix.hbase.index.covered.ColumnGroup;
 import org.apache.phoenix.hbase.index.covered.CoveredColumn;
 import org.apache.phoenix.hbase.index.covered.CoveredColumnIndexSpecifierBuilder;
@@ -86,7 +87,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
 
   public static final Log LOG = LogFactory.getLog(WALReplayWithIndexWritesAndCompressedWALIT.class);
   @Rule
-  public TableName table = new TableName();
+  public IndexTableName table = new IndexTableName();
   private String INDEX_TABLE_NAME = table.getTableNameString() + "_INDEX";
 
   final HBaseTestingUtility UTIL = new HBaseTestingUtility();
@@ -236,9 +237,9 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
     assertEquals("Primary region wasn't updated from WAL replay!", 1, result.size());
 
     // cleanup the index table
-    HBaseAdmin admin = UTIL.getHBaseAdmin();
-    admin.disableTable(INDEX_TABLE_NAME);
-    admin.deleteTable(INDEX_TABLE_NAME);
+    Admin admin = UTIL.getHBaseAdmin();
+    admin.disableTable(TableName.valueOf(INDEX_TABLE_NAME));
+    admin.deleteTable(TableName.valueOf(INDEX_TABLE_NAME));
     admin.close();
   }
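
Besides the Admin swap, this file renames Phoenix's JUnit rule org.apache.phoenix.hbase.index.TableName to IndexTableName so it no longer clashes with the imported org.apache.hadoop.hbase.TableName, and the string-based disableTable/deleteTable calls become TableName-based. A sketch of that cleanup step (illustrative; the helper name is made up and the Admin is assumed to be open):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class IndexCleanupSketch {
        // Drops an index table by name; Admin has no disableTable(String) overload.
        static void dropIndexTable(Admin admin, String indexTableName) throws IOException {
            TableName table = TableName.valueOf(indexTableName);
            if (admin.tableExists(table)) {
                admin.disableTable(table);
                admin.deleteTable(table);
            }
        }
    }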
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
index cf475f9..27ebec0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
@@ -30,8 +30,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.stats.StatisticsCollectionRunTracker;
@@ -137,13 +136,13 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
     }
 
     private HRegionInfo createTableAndGetRegion(String tableName) throws Exception {
-        byte[] tableNameBytes = Bytes.toBytes(tableName);
+        TableName tn = TableName.valueOf(tableName);
         String ddl = "CREATE TABLE " + tableName + " (PK1 VARCHAR PRIMARY KEY, KV1 VARCHAR)";
         try (Connection conn = DriverManager.getConnection(getUrl())) {
             conn.createStatement().execute(ddl);
             PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-            try (HBaseAdmin admin = phxConn.getQueryServices().getAdmin()) {
-                List<HRegionInfo> tableRegions = admin.getTableRegions(tableNameBytes);
+            try (Admin admin = phxConn.getQueryServices().getAdmin()) {
+                List<HRegionInfo> tableRegions = admin.getTableRegions(tn);
                 return tableRegions.get(0);
             }
         }
@@ -157,7 +156,7 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
     
     private void runMajorCompaction(String tableName) throws Exception {
         try (PhoenixConnection conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class)) {
-            try (HBaseAdmin admin = conn.getQueryServices().getAdmin()) {
+            try (Admin admin = conn.getQueryServices().getAdmin()) {
                 TableName t = TableName.valueOf(tableName);
                 admin.flush(t);
                 admin.majorCompact(t);
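
With Admin, region listing and compaction requests are keyed by TableName as well. Sketches of the two helpers this test relies on (illustrative; method names are placeholders and the Admin is assumed to be open):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class RegionAndCompactionSketch {
        // Returns the first region of the table, as createTableAndGetRegion does above.
        static HRegionInfo firstRegion(Admin admin, String tableName) throws IOException {
            List<HRegionInfo> regions = admin.getTableRegions(TableName.valueOf(tableName));
            return regions.get(0);
        }

        // Flush, then request a major compaction; majorCompact runs asynchronously.
        static void flushAndMajorCompact(Admin admin, String tableName) throws IOException {
            TableName table = TableName.valueOf(tableName);
            admin.flush(table);
            admin.majorCompact(table);
        }
    }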

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/AggregateQueryIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AggregateQueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AggregateQueryIT.java
index 437ee4f..cb892c6 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AggregateQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AggregateQueryIT.java
@@ -37,9 +37,9 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -83,7 +83,7 @@ public class AggregateQueryIT extends BaseQueryIT {
         String query = "SELECT a_string, b_string, count(1) FROM " + tableName + " WHERE organization_id=? and entity_id<=? GROUP BY a_string,b_string";
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);
-        HBaseAdmin admin = null;
+        Admin admin = null;
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             statement.setString(1, tenantId);
@@ -103,7 +103,7 @@ public class AggregateQueryIT extends BaseQueryIT {
             assertEquals(1, rs.getLong(3));
             assertFalse(rs.next());
             
-            byte[] tableNameBytes = Bytes.toBytes(tableName);
+            TableName tn =TableName.valueOf(tableName);
             admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
             Table htable = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(tableNameBytes);
             Configuration configuration = conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration();
@@ -111,7 +111,7 @@ public class AggregateQueryIT extends BaseQueryIT {
             ((ClusterConnection)hbaseConn).clearRegionCache(TableName.valueOf(tableName));
             RegionLocator regionLocator = hbaseConn.getRegionLocator(TableName.valueOf(tableName));
             int nRegions = regionLocator.getAllRegionLocations().size();
-            admin.split(tableNameBytes, ByteUtil.concat(Bytes.toBytes(tenantId), Bytes.toBytes("00A3")));
+            admin.split(tn, ByteUtil.concat(Bytes.toBytes(tenantId), Bytes.toBytes("00A3")));
             int retryCount = 0;
             do {
                 Thread.sleep(2000);
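
The split request likewise takes a TableName plus an explicit split point instead of a byte[] table name. A sketch (illustrative; names are placeholders, the Admin is assumed to be open):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.util.Bytes;

    public class SplitSketch {
        // Requests a split of the table at the given row key; the split completes
        // asynchronously, so callers typically poll the region count afterwards,
        // as the test above does.
        static void splitAt(Admin admin, String tableName, String splitRow) throws IOException {
            admin.split(TableName.valueOf(tableName), Bytes.toBytes(splitRow));
        }
    }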

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 5265b09..903fddc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -48,7 +48,8 @@ import java.util.Properties;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -231,8 +232,8 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
         try {
             conn.createStatement().execute(ddl);
             conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD CF.col2 integer CF.IN_MEMORY=true");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)).getColumnFamilies();
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("0", columnFamilies[0].getNameAsString());
                 assertFalse(columnFamilies[0].isInMemory());
@@ -936,8 +937,8 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
             assertEquals(3, rs.getShort("KEY_SEQ"));
             assertFalse(rs.next());
 
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("0", columnFamilies[0].getNameAsString());
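
Table metadata checks follow the same shape: getTableDescriptor now takes a TableName, and column-family properties are read from the returned descriptor. A sketch of that kind of inspection (illustrative; the helper is a placeholder):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class DescriptorInspectionSketch {
        // Prints each column family's name and whether it is marked IN_MEMORY.
        static void dumpFamilies(Admin admin, String tableName) throws IOException {
            HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf(tableName));
            for (HColumnDescriptor family : desc.getColumnFamilies()) {
                System.out.println(family.getNameAsString() + " inMemory=" + family.isInMemory());
            }
        }
    }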

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
index 08ecee6..56947bb 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
@@ -44,7 +44,8 @@ import java.util.Properties;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -216,7 +217,7 @@ public class ColumnProjectionOptimizationIT extends ParallelStatsDisabledIT {
         byte[][] familyNames = new byte[][] { cfB, cfC };
         String table = generateUniqueName();
         byte[] htableName = SchemaUtil.getTableNameAsBytes("", table);
-        HBaseAdmin admin = pconn.getQueryServices().getAdmin();
+        Admin admin = pconn.getQueryServices().getAdmin();
 
         @SuppressWarnings("deprecation")
         HTableDescriptor descriptor = new HTableDescriptor(htableName);
@@ -298,8 +299,8 @@ public class ColumnProjectionOptimizationIT extends ParallelStatsDisabledIT {
             assertFalse(rs.next());
         } finally {
             if (htable != null) htable.close();
-            admin.disableTable(htableName);
-            admin.deleteTable(htableName);
+            admin.disableTable(TableName.valueOf(htableName));
+            admin.deleteTable(TableName.valueOf(htableName));
             admin.close();
         }
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateSchemaIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateSchemaIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateSchemaIT.java
index fe09dcd..a05d702 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateSchemaIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateSchemaIT.java
@@ -26,7 +26,7 @@ import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.Properties;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryServices;
@@ -45,7 +45,7 @@ public class CreateSchemaIT extends ParallelStatsDisabledIT {
         String schemaName = generateUniqueName();
         String ddl = "CREATE SCHEMA " + schemaName;
         try (Connection conn = DriverManager.getConnection(getUrl(), props);
-                HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();) {
+                Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();) {
             conn.createStatement().execute(ddl);
             assertNotNull(admin.getNamespaceDescriptor(schemaName));
         }
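
CREATE SCHEMA maps to an HBase namespace when namespace mapping is enabled, and the assertion above verifies that through Admin. A sketch of the check (illustrative; getNamespaceDescriptor throws NamespaceNotFoundException, an IOException, if the namespace is missing):

    import java.io.IOException;
    import org.apache.hadoop.hbase.NamespaceDescriptor;
    import org.apache.hadoop.hbase.client.Admin;

    public class NamespaceCheckSketch {
        // Resolves the namespace created for a Phoenix schema, or throws if absent.
        static NamespaceDescriptor describeNamespace(Admin admin, String schemaName) throws IOException {
            return admin.getNamespaceDescriptor(schemaName);
        }
    }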

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
index 1abc653..866bd85 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
@@ -35,8 +35,9 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -114,10 +115,10 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         try (Connection conn = DriverManager.getConnection(getUrl(), props);) {
             conn.createStatement().execute(ddl);
         }
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
-        assertNotNull(admin.getTableDescriptor(Bytes.toBytes(tableName)));
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        assertNotNull(admin.getTableDescriptor(TableName.valueOf(tableName)));
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(BloomType.NONE, columnFamilies[0].getBloomFilterType());
 
         try (Connection conn = DriverManager.getConnection(getUrl(), props);) {
@@ -136,8 +137,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         }
         try (Connection conn = DriverManager.getConnection(getUrl(), props);) {
             conn.createStatement().execute(ddl);
-            assertNotEquals(null, admin.getTableDescriptor(
-                SchemaUtil.getPhysicalTableName(tableName.getBytes(), true).getName()));
+            assertNotEquals(null, admin.getTableDescriptor(TableName.valueOf(
+                SchemaUtil.getPhysicalTableName(tableName.getBytes(), true).getName())));
         } finally {
             admin.close();
         }
@@ -185,9 +186,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(1, columnFamilies.length);
         assertEquals(86400, columnFamilies[0].getTimeToLive());
     }
@@ -238,9 +239,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(2, columnFamilies.length);
         assertEquals(86400, columnFamilies[0].getTimeToLive());
         assertEquals("B", columnFamilies[0].getNameAsString());
@@ -264,9 +265,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(2, columnFamilies.length);
         assertEquals("0", columnFamilies[0].getNameAsString());
         assertEquals(86400, columnFamilies[0].getTimeToLive());
@@ -292,9 +293,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(2, columnFamilies.length);
         assertEquals("0", columnFamilies[0].getNameAsString());
         assertEquals(DEFAULT_REPLICATION_SCOPE, columnFamilies[0].getScope());
@@ -319,9 +320,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(2, columnFamilies.length);
         assertEquals("B", columnFamilies[0].getNameAsString());
         assertEquals(0, columnFamilies[0].getScope());
@@ -344,9 +345,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(1, columnFamilies.length);
         assertEquals("a", columnFamilies[0].getNameAsString());
         assertEquals(10000, columnFamilies[0].getTimeToLive());
@@ -366,9 +367,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(1, columnFamilies.length);
         assertEquals("a", columnFamilies[0].getNameAsString());
         assertEquals(10000, columnFamilies[0].getTimeToLive());
@@ -385,9 +386,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
         Properties props = new Properties();
         Connection conn = DriverManager.getConnection(getUrl(), props);
         conn.createStatement().execute(ddl);
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
         HColumnDescriptor[] columnFamilies =
-                admin.getTableDescriptor(Bytes.toBytes(tableName)).getColumnFamilies();
+                admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
         assertEquals(BloomType.ROW, columnFamilies[0].getBloomFilterType());
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/DisableLocalIndexIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DisableLocalIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DisableLocalIndexIT.java
index 01fc24c..e28c3a7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DisableLocalIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DisableLocalIndexIT.java
@@ -26,7 +26,8 @@ import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.Properties;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -54,8 +55,8 @@ public class DisableLocalIndexIT extends ParallelStatsDisabledIT {
         conn.createStatement().execute("CREATE TABLE " + tableName + " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK PRIMARY KEY(K1,K2)) MULTI_TENANT=true");
         conn.createStatement().execute("UPSERT INTO " + tableName + " VALUES('t1','x')");
         conn.createStatement().execute("UPSERT INTO " + tableName + " VALUES('t2','y')");
-        HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-        assertFalse(admin.tableExists(Bytes.toBytes(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX + tableName)));
+        Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+        assertFalse(admin.tableExists(TableName.valueOf(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX + tableName)));
         admin.close();
         try {
             Table t = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX + tableName));
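
Existence checks move from byte[] to TableName too. A sketch of the negative check used here (illustrative; the helper name is a placeholder and the Admin is assumed to be open):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.phoenix.util.MetaDataUtil;

    public class LocalIndexExistenceSketch {
        // True if a separate local-index table was created for the data table
        // (with local indexes stored in the data table, it should not be).
        static boolean localIndexTableExists(Admin admin, String dataTableName) throws IOException {
            TableName localIndexTable =
                    TableName.valueOf(MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX + dataTableName);
            return admin.tableExists(localIndexTable);
        }
    }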

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/DropSchemaIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DropSchemaIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DropSchemaIT.java
index 5c5420c..97ab29a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DropSchemaIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DropSchemaIT.java
@@ -31,7 +31,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryServices;
@@ -77,7 +77,7 @@ public class DropSchemaIT extends BaseUniqueNamesOwnClusterIT {
         String normalizeSchemaIdentifier = SchemaUtil.normalizeIdentifier(schema);
         String ddl = "DROP SCHEMA " + schema;
         try (Connection conn = DriverManager.getConnection(getUrl(), props);
-             HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+             Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
             try {
                 conn.createStatement().execute(ddl);
                 fail();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
index 714f80a..6a53906 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
@@ -34,7 +34,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.Table;
@@ -68,7 +68,7 @@ public class DynamicColumnIT extends ParallelStatsDisabledIT {
         tableName = generateUniqueName();
         try (PhoenixConnection pconn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class)) {
             ConnectionQueryServices services = pconn.getQueryServices();
-            try (HBaseAdmin admin = services.getAdmin()) {
+            try (Admin admin = services.getAdmin()) {
                 HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
                 htd.addFamily(new HColumnDescriptor(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES));
                 htd.addFamily(new HColumnDescriptor(FAMILY_NAME_A));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
index e090b98..0e0e555 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
@@ -26,8 +26,8 @@ import java.util.Properties;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -58,8 +58,8 @@ public class FlappingAlterTableIT extends ParallelStatsDisabledIT {
         conn1.createStatement().execute(ddl);
         ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR";
         conn1.createStatement().execute(ddl);
-        try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)).getColumnFamilies();
+        try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
             assertEquals(2, columnFamilies.length);
             assertEquals("0", columnFamilies[0].getNameAsString());
             assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[0].getTimeToLive());
@@ -82,8 +82,8 @@ public class FlappingAlterTableIT extends ParallelStatsDisabledIT {
         conn1.createStatement().execute(ddl);
         ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR";
         conn1.createStatement().execute(ddl);
-        try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+        try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
             HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
             assertEquals(2, columnFamilies.length);
             assertEquals("0", columnFamilies[0].getNameAsString());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
index 0d64be0..517cd6a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
@@ -31,7 +31,7 @@ import java.util.concurrent.CountDownLatch;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -151,7 +151,7 @@ public class FlappingLocalIndexIT extends BaseLocalIndexIT {
             ResultSet rs = conn1.createStatement().executeQuery("SELECT COUNT(*) FROM " + indexTableName);
             assertTrue(rs.next());
             
-            HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
+            Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
             int numRegions = admin.getTableRegions(physicalTableName).size();
             
             String query = "SELECT * FROM " + tableName +" where v1 like 'a%'";
@@ -285,7 +285,7 @@ public class FlappingLocalIndexIT extends BaseLocalIndexIT {
         ResultSet rs = conn1.createStatement().executeQuery("SELECT COUNT(*) FROM " + indexTableName);
         assertTrue(rs.next());
         assertEquals(4, rs.getInt(1));
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
         org.apache.hadoop.hbase.client.Connection hbaseConn = admin.getConnection();
         Table indexTable = hbaseConn.getTable(TableName.valueOf(indexPhysicalTableName));
         Pair<byte[][], byte[][]> startEndKeys = hbaseConn.getRegionLocator(TableName.valueOf(indexPhysicalTableName)).getStartEndKeys();
@@ -330,7 +330,7 @@ public class FlappingLocalIndexIT extends BaseLocalIndexIT {
         conn1.createStatement().execute("UPSERT INTO "+tableName+" values('j',2,4,2,'a')");
         conn1.createStatement().execute("UPSERT INTO "+tableName+" values('q',3,1,1,'c')");
         conn1.commit();
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
         HTableDescriptor tableDescriptor = admin.getTableDescriptor(physicalTableName);
         tableDescriptor.addCoprocessor(DeleyOpenRegionObserver.class.getName(), null,
             QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY - 1, null);
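
This test also reaches back from the Admin to its Connection to read region boundaries; the RegionLocator API is addressed by TableName as before. A sketch (illustrative; names are placeholders, and the Admin's own connection is deliberately not closed here):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.RegionLocator;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.Pair;

    public class RegionBoundarySketch {
        // Prints the start key of every region of the given physical table.
        static void dumpStartKeys(Admin admin, String physicalTableName) throws IOException {
            Connection hbaseConn = admin.getConnection();
            try (RegionLocator locator =
                    hbaseConn.getRegionLocator(TableName.valueOf(physicalTableName))) {
                Pair<byte[][], byte[][]> startEndKeys = locator.getStartEndKeys();
                for (byte[] startKey : startEndKeys.getFirst()) {
                    System.out.println(Bytes.toStringBinary(startKey));
                }
            }
        }
    }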

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
index 409e98f..0781097 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
@@ -32,7 +32,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.BaseTest;
@@ -111,7 +111,7 @@ public class LocalIndexSplitMergeIT extends BaseTest {
             ResultSet rs = conn1.createStatement().executeQuery("SELECT * FROM " + tableName);
             assertTrue(rs.next());
 
-            HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+            Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
             for (int i = 1; i < 5; i++) {
                 admin.split(physicalTableName, ByteUtil.concat(Bytes.toBytes(strings[3 * i])));
                 List<HRegionInfo> regionsOfUserTable =
@@ -212,7 +212,7 @@ public class LocalIndexSplitMergeIT extends BaseTest {
             ResultSet rs = conn1.createStatement().executeQuery("SELECT * FROM " + tableName);
             assertTrue(rs.next());
 
-            HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+            Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
             List<HRegionInfo> regionsOfUserTable =
                     MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
                         admin.getConnection(), physicalTableName, false);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
index fb78e1c..e8a4f80 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -57,7 +57,7 @@ public class MappingTableDataTypeIT extends ParallelStatsDisabledIT {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         PhoenixConnection conn = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
         
-        HBaseAdmin admin = conn.getQueryServices().getAdmin();
+        Admin admin = conn.getQueryServices().getAdmin();
         try {
             // Create table then get the single region for our new table.
             HTableDescriptor descriptor = new HTableDescriptor(tableName);
@@ -104,7 +104,7 @@ public class MappingTableDataTypeIT extends ParallelStatsDisabledIT {
         }
     }
 
-    private void insertData(final byte[] tableName, HBaseAdmin admin, Table t) throws IOException,
+    private void insertData(final byte[] tableName, Admin admin, Table t) throws IOException,
             InterruptedException {
         Put p = new Put(Bytes.toBytes("row"));
         p.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("q1"), Bytes.toBytes("value1"));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
index d9a27f5..b0c681e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -61,7 +61,7 @@ public class NamespaceSchemaMappingIT extends ParallelStatsDisabledIT {
 
         String phoenixFullTableName = schemaName + "." + tableName;
         String hbaseFullTableName = schemaName + ":" + tableName;
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
         admin.createNamespace(NamespaceDescriptor.create(namespace).build());
         admin.createTable(new HTableDescriptor(TableName.valueOf(namespace, tableName))
                 .addFamily(new HColumnDescriptor(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES)));
@@ -106,8 +106,8 @@ public class NamespaceSchemaMappingIT extends ParallelStatsDisabledIT {
         rs = conn.createStatement().executeQuery(query);
         assertTrue(rs.next());
         assertEquals(hbaseFullTableName, rs.getString(1));
-        admin.disableTable(phoenixFullTableName);
-        admin.deleteTable(phoenixFullTableName);
+        admin.disableTable(TableName.valueOf(phoenixFullTableName));
+        admin.deleteTable(TableName.valueOf(phoenixFullTableName));
         conn.createStatement().execute("DROP TABLE " + phoenixFullTableName);
         admin.close();
         conn.close();
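
The same Admin handle creates namespaces and namespaced tables; TableName.valueOf(namespace, qualifier) builds the "namespace:table" name that the later disable/delete calls use. A sketch of a full round trip (illustrative; names are placeholders):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.NamespaceDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class NamespaceTableSketch {
        // Creates namespace:table with a single column family, then drops both again.
        static void roundTrip(Admin admin, String namespace, String table, byte[] family)
                throws IOException {
            admin.createNamespace(NamespaceDescriptor.create(namespace).build());
            TableName name = TableName.valueOf(namespace, table);
            admin.createTable(new HTableDescriptor(name).addFamily(new HColumnDescriptor(family)));
            admin.disableTable(name);
            admin.deleteTable(name);
            admin.deleteNamespace(namespace);
        }
    }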

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
index 3b17ad1..5ece0bd 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
@@ -35,9 +35,10 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Row;
@@ -72,7 +73,7 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
         final byte[] tableBytes = tableName.getBytes();
         final byte[] familyName = Bytes.toBytes(SchemaUtil.normalizeIdentifier("1"));
         final byte[][] splits = new byte[][] {Bytes.toBytes(20), Bytes.toBytes(30)};
-        HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).getAdmin();
+        Admin admin = driver.getConnectionQueryServices(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).getAdmin();
         try {
             HTableDescriptor descriptor = new HTableDescriptor(tableBytes);
             HColumnDescriptor columnDescriptor =  new HColumnDescriptor(familyName);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/ProductMetricsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ProductMetricsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ProductMetricsIT.java
index 858a0fd..e673397 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ProductMetricsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ProductMetricsIT.java
@@ -36,7 +36,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryConstants;
@@ -1522,11 +1523,11 @@ public class ProductMetricsIT extends ParallelStatsDisabledIT {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);
 
-        HBaseAdmin admin = null;
+        Admin admin = null;
         try {
             initTableValues(tablename, tenantId, getSplits(tenantId));
             admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-            admin.flush(SchemaUtil.getTableNameAsBytes(PRODUCT_METRICS_SCHEMA_NAME,tablename));
+            admin.flush(TableName.valueOf(SchemaUtil.getTableNameAsBytes(PRODUCT_METRICS_SCHEMA_NAME,tablename)));
             String query = "SELECT SUM(TRANSACTIONS) FROM " + tablename + " WHERE FEATURE=?";
             PreparedStatement statement = conn.prepareStatement(query);
             statement.setString(1, F1);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
index af5a52a..c65ca63 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
@@ -47,7 +47,8 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -707,11 +708,11 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
             byte[] cfC = Bytes.toBytes("c");
             byte[][] familyNames = new byte[][] { cfB, cfC };
             byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
-            HBaseAdmin admin = pconn.getQueryServices().getAdmin();
+            Admin admin = pconn.getQueryServices().getAdmin();
             try {
-                admin.disableTable(htableName);
-                admin.deleteTable(htableName);
-                admin.enableTable(htableName);
+                admin.disableTable(TableName.valueOf(htableName));
+                admin.deleteTable(TableName.valueOf(htableName));
+                admin.enableTable(TableName.valueOf(htableName));
             } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
             }
 
@@ -725,7 +726,7 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
             createMDTestTable(pconn, tableName,
                 "a." + HColumnDescriptor.KEEP_DELETED_CELLS + "=" + Boolean.TRUE);
 
-            descriptor = admin.getTableDescriptor(htableName);
+            descriptor = admin.getTableDescriptor(TableName.valueOf(htableName));
             assertEquals(3, descriptor.getColumnFamilies().length);
             HColumnDescriptor cdA = descriptor.getFamily(cfA);
             assertNotEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCells());
@@ -786,10 +787,10 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
             byte[] cfC = Bytes.toBytes("c");
             byte[][] familyNames = new byte[][] { cfB, cfC };
             byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
-            try (HBaseAdmin admin = pconn.getQueryServices().getAdmin()) {
+            try (Admin admin = pconn.getQueryServices().getAdmin()) {
                 try {
-                    admin.disableTable(htableName);
-                    admin.deleteTable(htableName);
+                    admin.disableTable(TableName.valueOf(htableName));
+                    admin.deleteTable(TableName.valueOf(htableName));
                 } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
                 }
 
@@ -865,7 +866,7 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
 
             Table htable =
                     pconn.getQueryServices()
-                            .getTable(SchemaUtil.getTableNameAsBytes(schemaName, tableName));
+                            .getTable(TableName.valueOf(SchemaUtil.getTableNameAsBytes(schemaName, tableName)));
             Put put = new Put(Bytes.toBytes("0"));
             put.addColumn(cfB, Bytes.toBytes("COL1"), PInteger.INSTANCE.toBytes(1));
             put.addColumn(cfC, Bytes.toBytes("COL2"), PLong.INSTANCE.toBytes(2));
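
Where a table may not exist yet, the tests wrap the TableName-based disable/delete in a catch of HBase's TableNotFoundException. A sketch of that idempotent drop (illustrative; the helper name is a placeholder):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.TableNotFoundException;
    import org.apache.hadoop.hbase.client.Admin;

    public class DropIfExistsSketch {
        // Drops the table if present; a missing table is not treated as an error.
        static void dropIfExists(Admin admin, byte[] htableName) throws IOException {
            TableName name = TableName.valueOf(htableName);
            try {
                admin.disableTable(name);
                admin.deleteTable(name);
            } catch (TableNotFoundException ignored) {
                // Table was never created; nothing to clean up.
            }
        }
    }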

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
index f172d00..8ea1876 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
@@ -35,21 +35,13 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.util.Map;
 import java.util.Properties;
 
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.apache.phoenix.util.TestUtil;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
 
 
 public class ReverseScanIT extends ParallelStatsDisabledIT {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
index 7a7576d..d785063 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
@@ -32,8 +32,8 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.PTable;
@@ -94,8 +94,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
         conn1.createStatement().execute(ddl);
         ddl = "ALTER TABLE " + dataTableFullName + " SET REPLICATION_SCOPE=1";
         conn1.createStatement().execute(ddl);
-        try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+        try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                     .getColumnFamilies();
             assertEquals(1, columnFamilies.length);
             assertEquals("0", columnFamilies[0].getNameAsString());
@@ -117,8 +117,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
         conn1.createStatement().execute(ddl);
         ddl = "ALTER TABLE " + dataTableFullName + " SET COMPACTION_ENABLED=FALSE";
         conn1.createStatement().execute(ddl);
-        try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+        try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
             assertEquals(1, tableDesc.getColumnFamilies().length);
             assertEquals("0", tableDesc.getColumnFamilies()[0].getNameAsString());
             assertEquals(Boolean.toString(false), tableDesc.getValue(HTableDescriptor.COMPACTION_ENABLED));
@@ -139,8 +139,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
         conn1.createStatement().execute(ddl);
         ddl = "ALTER TABLE " + dataTableFullName + " SET COMPACTION_ENABLED = FALSE, REPLICATION_SCOPE = 1";
         conn1.createStatement().execute(ddl);
-        try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+        try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
             HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
             assertEquals(1, columnFamilies.length);
             assertEquals("0", columnFamilies[0].getNameAsString());
@@ -168,8 +168,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
         ddl = "ALTER TABLE " + dataTableFullName + " SET COMPACTION_ENABLED = FALSE, CF1.MIN_VERSIONS = 1, CF2.MIN_VERSIONS = 3, MIN_VERSIONS = 8, CF1.KEEP_DELETED_CELLS = true, KEEP_DELETED_CELLS = false";
         conn.createStatement().execute(ddl);
 
-        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+        try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
             HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
             assertEquals(3, columnFamilies.length);
 
@@ -387,8 +387,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                 + " SET COMPACTION_ENABLED = FALSE, CF.REPLICATION_SCOPE=1, IMMUTABLE_ROWS = TRUE, TTL=1000";
         conn.createStatement().execute(ddl);
         assertImmutableRows(conn, dataTableFullName, true);
-        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+        try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
             HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
             assertEquals(2, columnFamilies.length);
             assertEquals("CF", columnFamilies[0].getNameAsString());
@@ -418,8 +418,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             conn.createStatement().execute(ddl);
             conn.createStatement().execute(
                     "ALTER TABLE " + dataTableFullName + " ADD CF.col3 integer CF.IN_MEMORY=true");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("0", columnFamilies[0].getNameAsString());
@@ -447,8 +447,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                             "ALTER TABLE "
                                     + dataTableFullName
                                     + " ADD col4 integer, CF1.col5 integer, CF2.col6 integer IN_MEMORY=true, CF1.REPLICATION_SCOPE=1, CF2.IN_MEMORY=false ");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(3, columnFamilies.length);
                 assertEquals("0", columnFamilies[0].getNameAsString());
@@ -481,8 +481,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                             "ALTER TABLE "
                                     + dataTableFullName
                                     + " ADD col4 integer, CF1.col5 integer, CF2.col6 integer IN_MEMORY=true, CF1.REPLICATION_SCOPE=1, CF2.IN_MEMORY=false, XYZ.REPLICATION_SCOPE=1 ");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(3, columnFamilies.length);
                 assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -538,8 +538,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                             "ALTER TABLE "
                                     + dataTableFullName
                                     + " ADD col4 integer, CF1.col5 integer, CF2.col6 integer, CF3.col7 integer CF1.REPLICATION_SCOPE=1, CF1.IN_MEMORY=false, IN_MEMORY=true ");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(4, columnFamilies.length);
                 assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -574,8 +574,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                     "ALTER TABLE " + dataTableFullName + " ADD col4 integer XYZ.REPLICATION_SCOPE=1 ");
             conn.createStatement()
                     .execute("ALTER TABLE " + dataTableFullName + " ADD XYZ.col5 integer IN_MEMORY=true ");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -601,8 +601,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
         try {
             conn.createStatement().execute(ddl);
             conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD col2 integer IN_MEMORY=true");
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName))
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
                         .getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals("0", columnFamilies[0].getNameAsString());
@@ -631,8 +631,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             String ddl = "Alter table " + dataTableFullName + " add cf3.col5 integer, cf4.col6 integer in_memory=true";
             conn.createStatement().execute(ddl);
 
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 assertTrue(tableDesc.isCompactionEnabled());
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(5, columnFamilies.length);
@@ -670,8 +670,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             String ddl = "Alter table " + dataTableFullName + " add cf1.col5 integer in_memory=true";
             conn.createStatement().execute(ddl);
 
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 assertTrue(tableDesc.isCompactionEnabled());
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(3, columnFamilies.length);
@@ -747,8 +747,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
                     + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)"
                     + " ) " + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
             conn.createStatement().execute(ddl);
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals("XYZ", columnFamilies[0].getNameAsString());
@@ -757,8 +757,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " SET TTL=30";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals(30, columnFamilies[0].getTimeToLive());
@@ -785,8 +785,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " SET IN_MEMORY=true";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals(true, columnFamilies[0].isInMemory());
@@ -813,8 +813,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " SET IN_MEMORY=true";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals(true, columnFamilies[0].isInMemory());
@@ -841,8 +841,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " ADD COL3 INTEGER IN_MEMORY=true";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(1, columnFamilies.length);
                 assertEquals(true, columnFamilies[0].isInMemory());
@@ -869,8 +869,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " ADD NEWCF.COL3 INTEGER IN_MEMORY=true";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("NEWCF", columnFamilies[0].getNameAsString());
@@ -899,8 +899,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " ADD NEWCF.COL3 INTEGER IN_MEMORY=true";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("NEWCF", columnFamilies[0].getNameAsString());
@@ -914,8 +914,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " SET TTL=1000";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("NEWCF", columnFamilies[0].getNameAsString());
@@ -932,8 +932,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             ddl = "ALTER TABLE " + dataTableFullName + " ADD COL3 INTEGER";
             conn.createStatement().execute(ddl);
             conn.commit();
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
                 assertEquals(2, columnFamilies.length);
                 assertEquals("NEWCF", columnFamilies[0].getNameAsString());
@@ -969,8 +969,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
             }
             ddl = "ALTER TABLE " + dataTableFullName + " SET UNKNOWN_PROP='ABC'";
             conn.createStatement().execute(ddl);
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
+            try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
                 assertEquals("ABC", tableDesc.getValue("UNKNOWN_PROP"));
             }
         } finally {
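
All of the assertions in this file follow the same shape: fetch the HTableDescriptor through Admin with a TableName and inspect its column families. A small sketch of that read path is below, assuming an Admin handle obtained as in the earlier sketch and a purely illustrative helper class; it is not code from this patch.

    import java.io.IOException;

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class DescribeTableSketch {
        // Prints the column-family properties the assertions above inspect.
        static void printFamilies(Admin admin, String fullTableName) throws IOException {
            HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(fullTableName));
            for (HColumnDescriptor family : tableDesc.getColumnFamilies()) {
                System.out.println(family.getNameAsString()
                        + " TTL=" + family.getTimeToLive()
                        + " IN_MEMORY=" + family.isInMemory()
                        + " REPLICATION_SCOPE=" + family.getScope());
            }
        }
    }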

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanAfterManualSplitIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanAfterManualSplitIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanAfterManualSplitIT.java
index e4add9a..658ef92 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanAfterManualSplitIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanAfterManualSplitIT.java
@@ -31,7 +31,8 @@ import java.sql.SQLException;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.ConnectionQueryServices;
@@ -86,9 +87,9 @@ public class SkipScanAfterManualSplitIT extends ParallelStatsDisabledIT {
         }
         conn.commit();
         ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
-        HBaseAdmin admin = services.getAdmin();
+        Admin admin = services.getAdmin();
         try {
-            admin.flush(tableName);
+            admin.flush(TableName.valueOf(tableName));
         } finally {
             admin.close();
         }
@@ -104,9 +105,9 @@ public class SkipScanAfterManualSplitIT extends ParallelStatsDisabledIT {
         ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
         int nRegions = services.getAllTableRegions(tableNameBytes).size();
         int nInitialRegions = nRegions;
-        HBaseAdmin admin = services.getAdmin();
+        Admin admin = services.getAdmin();
         try {
-            admin.split(tableName);
+            admin.split(TableName.valueOf(tableName));
             int nTries = 0;
             while (nRegions == nInitialRegions && nTries < 10) {
                 Thread.sleep(1000);
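
The flush and split calls above lose their String overloads and take a TableName on the Admin interface. A minimal illustrative sketch of the same two calls, assuming an Admin handle obtained as in the earlier sketch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class FlushAndSplitSketch {
        // Both flush() and split() now take a TableName; split() is asynchronous,
        // which is why the test above polls the region count afterwards.
        static void flushAndSplit(Admin admin, String tableName) throws IOException {
            TableName table = TableName.valueOf(tableName);
            admin.flush(table);
            admin.split(table);
        }
    }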

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
index d98bbe2..c4e4a9e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
@@ -34,7 +34,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.util.TestUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -529,7 +530,7 @@ public class SkipScanQueryIT extends ParallelStatsDisabledIT {
             stmt.setString(3, "T0");
             stmt.executeUpdate();
             conn.commit();
-            try (HBaseAdmin admin =
+            try (Admin admin =
                     conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
                 /*
                  * The split key is 27 bytes instead of at least 30 bytes (CHAR(15) + CHAR(15)).
@@ -537,7 +538,7 @@ public class SkipScanQueryIT extends ParallelStatsDisabledIT {
                  * it ends up padding the split point bytes to 30.
                  */
                 byte[] smallSplitKey = Bytes.toBytes("00Do0000000a8w10D5o000002Rhv");
-                admin.split(Bytes.toBytes(tableName), smallSplitKey);
+                admin.split(TableName.valueOf(tableName), smallSplitKey);
             }
             ResultSet rs =
                     conn.createStatement().executeQuery("SELECT EXTENSION FROM " + tableName
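
The two-argument split moves to TableName as well, while the split point itself stays a raw byte[]. A small illustrative sketch, with made-up method and parameter names:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.util.Bytes;

    public class SplitPointSketch {
        // The table name becomes a TableName; the split point remains raw bytes.
        static void splitAt(Admin admin, String tableName, String splitKey) throws IOException {
            admin.split(TableName.valueOf(tableName), Bytes.toBytes(splitKey));
        }
    }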

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/TableSnapshotReadsMapReduceIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TableSnapshotReadsMapReduceIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TableSnapshotReadsMapReduceIT.java
index cae91a3..612bf3b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TableSnapshotReadsMapReduceIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TableSnapshotReadsMapReduceIT.java
@@ -37,7 +37,7 @@ import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.io.NullWritable;
@@ -195,10 +195,10 @@ public class TableSnapshotReadsMapReduceIT extends BaseUniqueNamesOwnClusterIT {
     upsertData(tableName);
 
     Connection conn = DriverManager.getConnection(getUrl());
-    HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+    Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
     admin.snapshot(SNAPSHOT_NAME, TableName.valueOf(tableName));
     // call flush to create new files in the region
-    admin.flush(tableName);
+    admin.flush(TableName.valueOf(tableName));
 
     List<HBaseProtos.SnapshotDescription> snapshots = admin.listSnapshots();
     Assert.assertEquals(tableName, snapshots.get(0).getTable());
@@ -211,7 +211,7 @@ public class TableSnapshotReadsMapReduceIT extends BaseUniqueNamesOwnClusterIT {
 
     public void deleteSnapshot(String tableName) throws Exception {
         try (Connection conn = DriverManager.getConnection(getUrl());
-                HBaseAdmin admin =
+                Admin admin =
                         conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();) {
             admin.deleteSnapshot(SNAPSHOT_NAME);
         }
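
In the snapshot test only flush() changes signature, since snapshot() and deleteSnapshot() already took a TableName and a String respectively. A hedged sketch of the same sequence, with placeholder names and an Admin obtained as in the earlier sketch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class SnapshotSketch {
        // snapshot() and deleteSnapshot() keep their signatures; only flush() changes,
        // from flush(String) to flush(TableName).
        static void snapshotAndFlush(Admin admin, String snapshotName, String tableName)
                throws IOException {
            TableName table = TableName.valueOf(tableName);
            admin.snapshot(snapshotName, table);
            admin.flush(table);
            // ... a snapshot-based read job would run here ...
            admin.deleteSnapshot(snapshotName);
        }
    }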

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDDLIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDDLIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDDLIT.java
index f8dfd65..078229c 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDDLIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDDLIT.java
@@ -45,7 +45,7 @@ import java.util.List;
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -67,7 +67,7 @@ public class TenantSpecificTablesDDLIT extends BaseTenantSpecificTablesIT {
     public void testCreateTenantSpecificTable() throws Exception {
         // ensure we didn't create a physical HBase table for the tenant-specific table
         Connection conn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES));
-        HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+        Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
         assertEquals(0, admin.listTables(TENANT_TABLE_NAME).length);
     }
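
Here only the declared type of the admin handle changes; listTables(String) and tableExists(TableName) are both available on the Admin interface. A small illustrative check, assuming a placeholder physical table name and an Admin obtained as in the earlier sketch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class PhysicalTableCheckSketch {
        // Returns true if a physical HBase table with this name exists, matching
        // either by regex (as the test does) or by an exact TableName lookup.
        static boolean hasPhysicalTable(Admin admin, String tableName) throws IOException {
            return admin.listTables(tableName).length > 0
                    || admin.tableExists(TableName.valueOf(tableName));
        }
    }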
     

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 044c5ac..3481169 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -48,7 +48,8 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.curator.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Table;
@@ -222,10 +223,10 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
                 }
             }
 
-            HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-            assertTrue(admin.tableExists(phoenixFullTableName));
-            assertTrue(admin.tableExists(schemaName + QueryConstants.NAME_SEPARATOR + indexName));
-            assertTrue(admin.tableExists(MetaDataUtil.getViewIndexPhysicalName(Bytes.toBytes(phoenixFullTableName))));
+            Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+            assertTrue(admin.tableExists(TableName.valueOf(phoenixFullTableName)));
+            assertTrue(admin.tableExists(TableName.valueOf(schemaName + QueryConstants.NAME_SEPARATOR + indexName)));
+            assertTrue(admin.tableExists(TableName.valueOf(MetaDataUtil.getViewIndexPhysicalName(Bytes.toBytes(phoenixFullTableName)))));
             Properties props = new Properties();
             props.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.toString(true));
             props.setProperty(QueryServices.IS_SYSTEM_TABLE_MAPPED_TO_NAMESPACE, Boolean.toString(false));
@@ -249,10 +250,9 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
             admin = phxConn.getQueryServices().getAdmin();
             String hbaseTableName = SchemaUtil.getPhysicalTableName(Bytes.toBytes(phoenixFullTableName), true)
                     .getNameAsString();
-            assertTrue(admin.tableExists(hbaseTableName));
-            assertTrue(admin.tableExists(Bytes.toBytes(hbaseTableName)));
-            assertTrue(admin.tableExists(schemaName + QueryConstants.NAMESPACE_SEPARATOR + indexName));
-            assertTrue(admin.tableExists(MetaDataUtil.getViewIndexPhysicalName(Bytes.toBytes(hbaseTableName))));
+            assertTrue(admin.tableExists(TableName.valueOf(hbaseTableName)));
+            assertTrue(admin.tableExists(TableName.valueOf(schemaName + QueryConstants.NAMESPACE_SEPARATOR + indexName)));
+            assertTrue(admin.tableExists(TableName.valueOf(MetaDataUtil.getViewIndexPhysicalName(Bytes.toBytes(hbaseTableName)))));
             i = 0;
             // validate data
             for (String tableName : tableNames) {
@@ -631,7 +631,7 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
                 (DriverManager.getConnection(getUrl())).unwrap(PhoenixConnection.class)) {
             try (Table htable =
                     conn.getQueryServices().getTable(
-                        Bytes.toBytes(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME))) {
+                            Bytes.toBytes(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME))) {
                 RowMutations mutations = new RowMutations(rowKey);
                 mutations.add(viewColumnDefinitionPut);
                 htable.mutateRow(mutations);