You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by an...@apache.org on 2017/11/27 13:03:12 UTC
[4/8] phoenix git commit: PHOENIX-4304 Refactoring to avoid using
deprecated HTableDescriptor, HColumnDescriptor,
HRegionInfo (Rajeshbabu Chintaguntla)
PHOENIX-4304 Refactoring to avoid using deprecated HTableDescriptor, HColumnDescriptor, HRegionInfo (Rajeshbabu Chintaguntla)
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c3ec80d3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c3ec80d3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c3ec80d3
Branch: refs/heads/5.x-HBase-2.0
Commit: c3ec80d3166d8b365ae39a69bd57d24d8971c63d
Parents: 1beac27
Author: Ankit Singhal <an...@gmail.com>
Authored: Mon Nov 27 15:31:57 2017 +0530
Committer: Ankit Singhal <an...@gmail.com>
Committed: Mon Nov 27 15:31:57 2017 +0530
----------------------------------------------------------------------
.../wal/WALRecoveryRegionPostOpenIT.java | 8 +-
...ReplayWithIndexWritesAndCompressedWALIT.java | 41 +-
.../StatisticsCollectionRunTrackerIT.java | 18 +-
.../AlterMultiTenantTableWithViewsIT.java | 15 +-
.../apache/phoenix/end2end/AlterTableIT.java | 10 +-
.../end2end/ColumnProjectionOptimizationIT.java | 13 +-
.../apache/phoenix/end2end/CreateTableIT.java | 50 ++-
.../apache/phoenix/end2end/DynamicColumnIT.java | 14 +-
.../phoenix/end2end/FlappingAlterTableIT.java | 15 +-
.../phoenix/end2end/FlappingLocalIndexIT.java | 8 +-
.../end2end/IndexToolForPartialBuildIT.java | 2 +-
.../phoenix/end2end/LocalIndexSplitMergeIT.java | 28 +-
.../phoenix/end2end/MappingTableDataTypeIT.java | 14 +-
.../end2end/NamespaceSchemaMappingIT.java | 13 +-
.../phoenix/end2end/NativeHBaseTypesIT.java | 22 +-
.../end2end/QueryDatabaseMetaDataIT.java | 41 +-
.../apache/phoenix/end2end/SetPropertyIT.java | 89 ++--
.../end2end/SkipScanAfterManualSplitIT.java | 4 +-
.../org/apache/phoenix/end2end/UseSchemaIT.java | 10 +-
.../java/org/apache/phoenix/end2end/ViewIT.java | 22 +-
.../phoenix/end2end/index/BaseIndexIT.java | 12 +-
.../phoenix/end2end/index/DropMetadataIT.java | 20 +-
.../phoenix/end2end/index/LocalIndexIT.java | 27 +-
.../phoenix/end2end/index/MutableIndexIT.java | 96 +----
.../index/MutableIndexReplicationIT.java | 38 +-
.../end2end/index/MutableIndexSplitIT.java | 16 +-
.../UpsertSelectOverlappingBatchesIT.java | 24 +-
.../FailForUnsupportedHBaseVersionsIT.java | 18 +-
.../iterate/RoundRobinResultIteratorIT.java | 5 +-
.../apache/phoenix/rpc/PhoenixServerRpcIT.java | 18 +-
.../phoenix/schema/stats/StatsCollectorIT.java | 5 +-
.../phoenix/tx/ParameterizedTransactionIT.java | 29 +-
.../org/apache/phoenix/tx/TransactionIT.java | 51 +--
.../IndexHalfStoreFileReaderGenerator.java | 1 -
.../apache/phoenix/cache/ServerCacheClient.java | 8 +-
.../apache/phoenix/compile/UpsertCompiler.java | 4 +-
.../UngroupedAggregateRegionObserver.java | 4 +-
.../phoenix/execute/ClientAggregatePlan.java | 5 +-
.../org/apache/phoenix/hbase/index/Indexer.java | 12 +-
.../hbase/index/covered/NonTxIndexBuilder.java | 2 -
.../write/ParallelWriterIndexCommitter.java | 4 +-
.../TrackingParallelWriterIndexCommitter.java | 4 +-
.../apache/phoenix/index/PhoenixIndexCodec.java | 9 +-
.../index/PhoenixIndexFailurePolicy.java | 12 +-
.../phoenix/iterate/BaseResultIterators.java | 12 +-
.../iterate/MapReduceParallelScanGrouper.java | 15 +-
.../phoenix/iterate/RegionScannerFactory.java | 5 +-
.../apache/phoenix/iterate/SnapshotScanner.java | 76 ++--
.../iterate/TableSnapshotResultIterator.java | 28 +-
.../phoenix/parse/CreateTableStatement.java | 4 -
.../phoenix/query/ConnectionQueryServices.java | 4 +-
.../query/ConnectionQueryServicesImpl.java | 405 +++++++++----------
.../query/ConnectionlessQueryServicesImpl.java | 45 ++-
.../query/DelegateConnectionQueryServices.java | 4 +-
.../apache/phoenix/query/GuidePostsCache.java | 6 +-
.../apache/phoenix/query/QueryConstants.java | 18 +-
.../apache/phoenix/schema/MetaDataClient.java | 22 +-
.../transaction/OmidTransactionTable.java | 98 ++++-
.../transaction/PhoenixTransactionContext.java | 2 +
.../transaction/TephraTransactionTable.java | 30 +-
.../java/org/apache/phoenix/util/IndexUtil.java | 8 +-
.../org/apache/phoenix/util/MetaDataUtil.java | 49 +--
.../org/apache/phoenix/util/RepairUtil.java | 6 +-
.../java/org/apache/phoenix/util/ScanUtil.java | 4 +-
.../org/apache/phoenix/util/UpgradeUtil.java | 45 ++-
.../phoenix/compile/QueryCompilerTest.java | 4 +-
.../CoveredColumnIndexSpecifierBuilder.java | 12 +-
.../index/covered/NonTxIndexBuilderTest.java | 16 +-
.../index/util/TestIndexManagementUtil.java | 20 +-
.../hbase/index/write/TestIndexWriter.java | 16 +-
.../index/write/TestWALRecoveryCaching.java | 33 +-
.../recovery/TestPerRegionIndexWriteCache.java | 19 +-
.../java/org/apache/phoenix/query/BaseTest.java | 8 +-
.../schema/stats/StatisticsScannerTest.java | 7 +-
.../java/org/apache/phoenix/util/TestUtil.java | 25 +-
75 files changed, 967 insertions(+), 940 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALRecoveryRegionPostOpenIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALRecoveryRegionPostOpenIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALRecoveryRegionPostOpenIT.java
index 20d59a7..674c70c 100644
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALRecoveryRegionPostOpenIT.java
+++ b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALRecoveryRegionPostOpenIT.java
@@ -125,7 +125,7 @@ public class WALRecoveryRegionPostOpenIT extends BaseTest {
throw new DoNotRetryIOException();
}
Mutation operation = miniBatchOp.getOperation(0);
- Set<byte[]> keySet = operation.getFamilyMap().keySet();
+ Set<byte[]> keySet = operation.getFamilyCellMap().keySet();
for(byte[] family: keySet) {
if(Bytes.toString(family).startsWith(QueryConstants.LOCAL_INDEX_COLUMN_FAMILY_PREFIX) && failIndexTableWrite) {
throw new DoNotRetryIOException();
@@ -232,17 +232,17 @@ public class WALRecoveryRegionPostOpenIT extends BaseTest {
assertTrue(!Arrays.equals(mutations[0].getRow(),Bytes.toBytes("a")));
//wait for data table region repoen.
- List<Region> dataTableRegions=null;
+ List<HRegion> dataTableRegions=null;
for(int i=1;i<=200;i++) {
- dataTableRegions=liveRegionServer.getOnlineRegions(TableName.valueOf(DATA_TABLE_NAME));
+ dataTableRegions=liveRegionServer.getRegions(TableName.valueOf(DATA_TABLE_NAME));
if(dataTableRegions.size() > 0) {
break;
}
Thread.sleep(ONE_SEC);
}
- dataTableRegions=liveRegionServer.getOnlineRegions(TableName.valueOf(DATA_TABLE_NAME));
+ dataTableRegions=liveRegionServer.getRegions(TableName.valueOf(DATA_TABLE_NAME));
assertTrue(dataTableRegions.size()==1);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 67b7df3..2c6467a 100644
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -33,20 +33,23 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
@@ -100,7 +103,6 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
@Before
public void setUp() throws Exception {
setupCluster();
- Path hbaseRootDir = UTIL.getDataTestDir();
this.conf = HBaseConfiguration.create(UTIL.getConfiguration());
this.fs = UTIL.getDFSCluster().getFileSystem();
this.hbaseRootDir = new Path(this.conf.get(HConstants.HBASE_DIR));
@@ -167,15 +169,13 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
* seqids.
* @throws Exception on failure
*/
- @SuppressWarnings("deprecation")
@Test
public void testReplayEditsWrittenViaHRegion() throws Exception {
final String tableNameStr = "testReplayEditsWrittenViaHRegion";
- final HRegionInfo hri = new HRegionInfo(org.apache.hadoop.hbase.TableName.valueOf(tableNameStr),
- null, null, false);
+ final RegionInfo hri = RegionInfoBuilder.newBuilder(org.apache.hadoop.hbase.TableName.valueOf(tableNameStr)).setSplit(false).build();
final Path basedir = FSUtils.getTableDir(hbaseRootDir, org.apache.hadoop.hbase.TableName.valueOf(tableNameStr));
deleteDir(basedir);
- final HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
+ final TableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
//setup basic indexing for the table
// enable indexing to a non-existant index table
@@ -216,7 +216,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
Mockito.any(Exception.class));
// then create the index table so we are successful on WAL replay
- TestIndexManagementUtil.createIndexTable(UTIL.getHBaseAdmin(), INDEX_TABLE_NAME);
+ TestIndexManagementUtil.createIndexTable(UTIL.getAdmin(), INDEX_TABLE_NAME);
// run the WAL split and setup the region
runWALSplit(this.conf, walFactory);
@@ -237,7 +237,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
assertEquals("Primary region wasn't updated from WAL replay!", 1, result.size());
// cleanup the index table
- Admin admin = UTIL.getHBaseAdmin();
+ Admin admin = UTIL.getAdmin();
admin.disableTable(TableName.valueOf(INDEX_TABLE_NAME));
admin.deleteTable(TableName.valueOf(INDEX_TABLE_NAME));
admin.close();
@@ -248,16 +248,15 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
* @param tableName name of the table descriptor
* @return
*/
- private HTableDescriptor createBasic3FamilyHTD(final String tableName) {
- @SuppressWarnings("deprecation")
- HTableDescriptor htd = new HTableDescriptor(tableName);
- HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
- htd.addFamily(a);
- HColumnDescriptor b = new HColumnDescriptor(Bytes.toBytes("b"));
- htd.addFamily(b);
- HColumnDescriptor c = new HColumnDescriptor(Bytes.toBytes("c"));
- htd.addFamily(c);
- return htd;
+ private TableDescriptor createBasic3FamilyHTD(final String tableName) {
+ TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
+ ColumnFamilyDescriptor a = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("a"));
+ tableBuilder.addColumnFamily(a);
+ ColumnFamilyDescriptor b = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("b"));
+ tableBuilder.addColumnFamily(b);
+ ColumnFamilyDescriptor c = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("c"));
+ tableBuilder.addColumnFamily(c);
+ return tableBuilder.build();
}
/*
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
index 27ebec0..71c9e01 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
@@ -28,9 +28,9 @@ import java.sql.PreparedStatement;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.stats.StatisticsCollectionRunTracker;
@@ -56,7 +56,7 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
@Test
public void testStateBeforeAndAfterUpdateStatsCommand() throws Exception {
String tableName = fullTableName;
- HRegionInfo regionInfo = createTableAndGetRegion(tableName);
+ RegionInfo regionInfo = createTableAndGetRegion(tableName);
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
// assert that the region wasn't added to the tracker
@@ -71,7 +71,7 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
@Test
public void testStateBeforeAndAfterMajorCompaction() throws Exception {
String tableName = fullTableName;
- HRegionInfo regionInfo = createTableAndGetRegion(tableName);
+ RegionInfo regionInfo = createTableAndGetRegion(tableName);
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
// Upsert values in the table.
@@ -99,7 +99,7 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
@Test
public void testMajorCompactionPreventsUpdateStatsFromRunning() throws Exception {
String tableName = fullTableName;
- HRegionInfo regionInfo = createTableAndGetRegion(tableName);
+ RegionInfo regionInfo = createTableAndGetRegion(tableName);
// simulate stats collection via major compaction by marking the region as compacting in the tracker
markRegionAsCompacting(regionInfo);
Assert.assertEquals("Row count didn't match", COMPACTION_UPDATE_STATS_ROW_COUNT, runUpdateStats(tableName));
@@ -112,7 +112,7 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
@Test
public void testUpdateStatsPreventsAnotherUpdateStatsFromRunning() throws Exception {
String tableName = fullTableName;
- HRegionInfo regionInfo = createTableAndGetRegion(tableName);
+ RegionInfo regionInfo = createTableAndGetRegion(tableName);
markRunningUpdateStats(regionInfo);
Assert.assertEquals("Row count didn't match", CONCURRENT_UPDATE_STATS_ROW_COUNT,
runUpdateStats(tableName));
@@ -123,26 +123,26 @@ public class StatisticsCollectionRunTrackerIT extends ParallelStatsEnabledIT {
assertTrue(tracker.removeUpdateStatsCommandRegion(regionInfo));
}
- private void markRegionAsCompacting(HRegionInfo regionInfo) {
+ private void markRegionAsCompacting(RegionInfo regionInfo) {
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
tracker.addCompactingRegion(regionInfo);
}
- private void markRunningUpdateStats(HRegionInfo regionInfo) {
+ private void markRunningUpdateStats(RegionInfo regionInfo) {
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
tracker.addUpdateStatsCommandRegion(regionInfo);
}
- private HRegionInfo createTableAndGetRegion(String tableName) throws Exception {
+ private RegionInfo createTableAndGetRegion(String tableName) throws Exception {
TableName tn = TableName.valueOf(tableName);
String ddl = "CREATE TABLE " + tableName + " (PK1 VARCHAR PRIMARY KEY, KV1 VARCHAR)";
try (Connection conn = DriverManager.getConnection(getUrl())) {
conn.createStatement().execute(ddl);
PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
try (Admin admin = phxConn.getQueryServices().getAdmin()) {
- List<HRegionInfo> tableRegions = admin.getTableRegions(tn);
+ List<RegionInfo> tableRegions = admin.getRegions(tn);
return tableRegions.get(0);
}
}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViewsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViewsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViewsIT.java
index 7b4ff68..60dbb44 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViewsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViewsIT.java
@@ -35,7 +35,8 @@ import java.sql.Statement;
import java.util.List;
import java.util.Properties;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.exception.SQLExceptionCode;
@@ -565,14 +566,14 @@ public class AlterMultiTenantTableWithViewsIT extends ParallelStatsDisabledIT {
try (Connection conn = DriverManager.getConnection(getUrl())) {
String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
conn.createStatement().execute(baseTableDDL);
- HTableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
+ TableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
try (Connection tenant1Conn = getTenantConnection("tenant1")) {
String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
tenant1Conn.createStatement().execute(view1DDL);
// This should not modify the base table
String alterView = "ALTER VIEW " + view1 + " ADD NEWCOL1 VARCHAR";
tenant1Conn.createStatement().execute(alterView);
- HTableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
+ TableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertEquals(tableDesc1, tableDesc2);
// Add a new column family that doesn't already exist in the base table
@@ -580,16 +581,16 @@ public class AlterMultiTenantTableWithViewsIT extends ParallelStatsDisabledIT {
tenant1Conn.createStatement().execute(alterView);
// Verify that the column family now shows up in the base table descriptor
- tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
+ tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertFalse(tableDesc2.equals(tableDesc1));
- assertNotNull(tableDesc2.getFamily(Bytes.toBytes("CF")));
+ assertNotNull(tableDesc2.getColumnFamily(Bytes.toBytes("CF")));
// Add a column with an existing column family. This shouldn't modify the base table.
alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL3 VARCHAR";
tenant1Conn.createStatement().execute(alterView);
- HTableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
+ TableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertTrue(tableDesc3.equals(tableDesc2));
- assertNotNull(tableDesc3.getFamily(Bytes.toBytes("CF")));
+ assertNotNull(tableDesc3.getColumnFamily(Bytes.toBytes("CF")));
}
}
}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 903fddc..34186bb 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -46,10 +46,10 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -233,7 +233,7 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD CF.col2 integer CF.IN_MEMORY=true");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertFalse(columnFamilies[0].isInMemory());
@@ -938,8 +938,8 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
assertFalse(rs.next());
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
index 56947bb..c399caf 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationIT.java
@@ -42,12 +42,13 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.types.PInteger;
@@ -219,13 +220,11 @@ public class ColumnProjectionOptimizationIT extends ParallelStatsDisabledIT {
byte[] htableName = SchemaUtil.getTableNameAsBytes("", table);
Admin admin = pconn.getQueryServices().getAdmin();
- @SuppressWarnings("deprecation")
- HTableDescriptor descriptor = new HTableDescriptor(htableName);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(htableName));
for (byte[] familyName : familyNames) {
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
- descriptor.addFamily(columnDescriptor);
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName));
}
- admin.createTable(descriptor);
+ admin.createTable(builder.build());
Properties props = new Properties();
Connection conn1 = DriverManager.getConnection(getUrl(), props);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
index 866bd85..b7b0957 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
@@ -17,7 +17,6 @@
*/
package org.apache.phoenix.end2end;
-import static org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
@@ -34,12 +33,11 @@ import java.sql.Statement;
import java.util.List;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixStatement;
@@ -116,9 +114,9 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
}
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- assertNotNull(admin.getTableDescriptor(TableName.valueOf(tableName)));
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ assertNotNull(admin.getDescriptor(TableName.valueOf(tableName)));
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(BloomType.NONE, columnFamilies[0].getBloomFilterType());
try (Connection conn = DriverManager.getConnection(getUrl(), props);) {
@@ -137,7 +135,7 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
}
try (Connection conn = DriverManager.getConnection(getUrl(), props);) {
conn.createStatement().execute(ddl);
- assertNotEquals(null, admin.getTableDescriptor(TableName.valueOf(
+ assertNotEquals(null, admin.getDescriptor(TableName.valueOf(
SchemaUtil.getPhysicalTableName(tableName.getBytes(), true).getName())));
} finally {
admin.close();
@@ -187,8 +185,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(86400, columnFamilies[0].getTimeToLive());
}
@@ -240,8 +238,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals(86400, columnFamilies[0].getTimeToLive());
assertEquals("B", columnFamilies[0].getNameAsString());
@@ -266,8 +264,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(86400, columnFamilies[0].getTimeToLive());
@@ -294,11 +292,11 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
- assertEquals(DEFAULT_REPLICATION_SCOPE, columnFamilies[0].getScope());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_REPLICATION_SCOPE, columnFamilies[0].getScope());
assertEquals("B", columnFamilies[1].getNameAsString());
assertEquals(1, columnFamilies[1].getScope());
}
@@ -321,8 +319,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("B", columnFamilies[0].getNameAsString());
assertEquals(0, columnFamilies[0].getScope());
@@ -346,8 +344,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("a", columnFamilies[0].getNameAsString());
assertEquals(10000, columnFamilies[0].getTimeToLive());
@@ -368,8 +366,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("a", columnFamilies[0].getNameAsString());
assertEquals(10000, columnFamilies[0].getTimeToLive());
@@ -387,8 +385,8 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.createStatement().execute(ddl);
Admin admin = driver.getConnectionQueryServices(getUrl(), props).getAdmin();
- HColumnDescriptor[] columnFamilies =
- admin.getTableDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies =
+ admin.getDescriptor(TableName.valueOf(tableName)).getColumnFamilies();
assertEquals(BloomType.ROW, columnFamilies[0].getBloomFilterType());
}
@@ -722,7 +720,7 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
}
@Test
- public void testSetHTableDescriptorPropertyOnView() throws Exception {
+ public void testSetTableDescriptorPropertyOnView() throws Exception {
Properties props = new Properties();
final String dataTableFullName = generateUniqueName();
String ddl =
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
index 6a53906..747107a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnIT.java
@@ -31,13 +31,13 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.ConnectionQueryServices;
@@ -69,11 +69,11 @@ public class DynamicColumnIT extends ParallelStatsDisabledIT {
try (PhoenixConnection pconn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class)) {
ConnectionQueryServices services = pconn.getQueryServices();
try (Admin admin = services.getAdmin()) {
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
- htd.addFamily(new HColumnDescriptor(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES));
- htd.addFamily(new HColumnDescriptor(FAMILY_NAME_A));
- htd.addFamily(new HColumnDescriptor(FAMILY_NAME_B));
- admin.createTable(htd);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES));
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME_A));
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME_B));
+ admin.createTable(builder.build());
}
try (Table hTable = services.getTable(Bytes.toBytes(tableName))) {
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
index 0e0e555..27285e3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java
@@ -24,10 +24,11 @@ import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.SchemaUtil;
@@ -59,12 +60,12 @@ public class FlappingAlterTableIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR";
conn1.createStatement().execute(ddl);
try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
- assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[0].getTimeToLive());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_TTL, columnFamilies[0].getTimeToLive());
assertEquals("CF", columnFamilies[1].getNameAsString());
- assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[1].getTimeToLive());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_TTL, columnFamilies[1].getTimeToLive());
}
}
@@ -83,8 +84,8 @@ public class FlappingAlterTableIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR";
conn1.createStatement().execute(ddl);
try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(1000, columnFamilies[0].getTimeToLive());
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
index 7e769ba..a5aa27e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingLocalIndexIT.java
@@ -328,11 +328,11 @@ public class FlappingLocalIndexIT extends BaseLocalIndexIT {
conn1.createStatement().execute("UPSERT INTO "+tableName+" values('q',3,1,1,'c')");
conn1.commit();
Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(physicalTableName);
- tableDescriptor.addCoprocessor(DeleyOpenRegionObserver.class.getName(), null,
- QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY - 1, null);
+ TableDescriptor tableDescriptor = admin.getDescriptor(physicalTableName);
+ tableDescriptor=TableDescriptorBuilder.newBuilder(tableDescriptor).addCoprocessor(DeleyOpenRegionObserver.class.getName(), null,
+ QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY - 1, null).build();
admin.disableTable(physicalTableName);
- admin.modifyTable(physicalTableName, tableDescriptor);
+ admin.modifyTable(tableDescriptor);
admin.enableTable(physicalTableName);
DeleyOpenRegionObserver.DELAY_OPEN = true;
conn1.createStatement().execute(
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildIT.java
index 19ffe1a..70812c3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildIT.java
@@ -270,7 +270,7 @@ public class IndexToolForPartialBuildIT extends BaseOwnClusterIT {
throw new DoNotRetryIOException();
}
Mutation operation = miniBatchOp.getOperation(0);
- Set<byte[]> keySet = operation.getFamilyMap().keySet();
+ Set<byte[]> keySet = operation.getFamilyCellMap().keySet();
for(byte[] family: keySet) {
if(Bytes.toString(family).startsWith(QueryConstants.LOCAL_INDEX_COLUMN_FAMILY_PREFIX) && FAIL_WRITE) {
throw new DoNotRetryIOException();
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
index 0781097..ee8063c 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LocalIndexSplitMergeIT.java
@@ -29,10 +29,10 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
-import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.BaseTest;
@@ -114,15 +114,15 @@ public class LocalIndexSplitMergeIT extends BaseTest {
Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
for (int i = 1; i < 5; i++) {
admin.split(physicalTableName, ByteUtil.concat(Bytes.toBytes(strings[3 * i])));
- List<HRegionInfo> regionsOfUserTable =
- MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
- admin.getConnection(), physicalTableName, false);
+ List<RegionInfo> regionsOfUserTable =
+ MetaTableAccessor.getTableRegions(admin.getConnection(), physicalTableName,
+ false);
while (regionsOfUserTable.size() != (4 + i)) {
Thread.sleep(100);
regionsOfUserTable =
- MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
- admin.getConnection(), physicalTableName, false);
+ MetaTableAccessor.getTableRegions(admin.getConnection(),
+ physicalTableName, false);
}
assertEquals(4 + i, regionsOfUserTable.size());
String[] tIdColumnValues = new String[26];
@@ -213,20 +213,20 @@ public class LocalIndexSplitMergeIT extends BaseTest {
assertTrue(rs.next());
Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
- List<HRegionInfo> regionsOfUserTable =
- MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
- admin.getConnection(), physicalTableName, false);
- admin.mergeRegions(regionsOfUserTable.get(0).getEncodedNameAsBytes(),
+ List<RegionInfo> regionsOfUserTable =
+ MetaTableAccessor.getTableRegions(admin.getConnection(), physicalTableName,
+ false);
+ admin.mergeRegionsAsync(regionsOfUserTable.get(0).getEncodedNameAsBytes(),
regionsOfUserTable.get(1).getEncodedNameAsBytes(), false);
regionsOfUserTable =
- MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
- admin.getConnection(), physicalTableName, false);
+ MetaTableAccessor.getTableRegions(admin.getConnection(), physicalTableName,
+ false);
while (regionsOfUserTable.size() != 3) {
Thread.sleep(100);
regionsOfUserTable =
- MetaTableAccessor.getTableRegions(getUtility().getZooKeeperWatcher(),
- admin.getConnection(), physicalTableName, false);
+ MetaTableAccessor.getTableRegions(admin.getConnection(), physicalTableName,
+ false);
}
String query = "SELECT t_id,k1,v1 FROM " + tableName;
rs = conn1.createStatement().executeQuery(query);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
index e8a4f80..f064730 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MappingTableDataTypeIT.java
@@ -34,15 +34,15 @@ import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.util.PropertiesUtil;
@@ -60,12 +60,10 @@ public class MappingTableDataTypeIT extends ParallelStatsDisabledIT {
Admin admin = conn.getQueryServices().getAdmin();
try {
// Create table then get the single region for our new table.
- HTableDescriptor descriptor = new HTableDescriptor(tableName);
- HColumnDescriptor columnDescriptor1 = new HColumnDescriptor(Bytes.toBytes("cf1"));
- HColumnDescriptor columnDescriptor2 = new HColumnDescriptor(Bytes.toBytes("cf2"));
- descriptor.addFamily(columnDescriptor1);
- descriptor.addFamily(columnDescriptor2);
- admin.createTable(descriptor);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf1")))
+ .addColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf2")));
+ admin.createTable(builder.build());
Table t = conn.getQueryServices().getTable(Bytes.toBytes(mtest));
insertData(tableName.getName(), admin, t);
t.close();
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
index b0c681e..7d24cdd 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NamespaceSchemaMappingIT.java
@@ -25,13 +25,13 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.QueryConstants;
@@ -52,7 +52,6 @@ public class NamespaceSchemaMappingIT extends ParallelStatsDisabledIT {
* namespace or not
*/
@Test
- @SuppressWarnings("deprecation")
public void testBackWardCompatibility() throws Exception {
String namespace = "TEST_SCHEMA";
@@ -63,10 +62,10 @@ public class NamespaceSchemaMappingIT extends ParallelStatsDisabledIT {
String hbaseFullTableName = schemaName + ":" + tableName;
Admin admin = driver.getConnectionQueryServices(getUrl(), TestUtil.TEST_PROPERTIES).getAdmin();
admin.createNamespace(NamespaceDescriptor.create(namespace).build());
- admin.createTable(new HTableDescriptor(TableName.valueOf(namespace, tableName))
- .addFamily(new HColumnDescriptor(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES)));
- admin.createTable(new HTableDescriptor(TableName.valueOf(phoenixFullTableName))
- .addFamily(new HColumnDescriptor(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES)));
+ admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf(namespace, tableName))
+ .addColumnFamily(ColumnFamilyDescriptorBuilder.of(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES)).build());
+ admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf(phoenixFullTableName))
+ .addColumnFamily(ColumnFamilyDescriptorBuilder.of(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES)).build());
Put put = new Put(PVarchar.INSTANCE.toBytes(phoenixFullTableName));
put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES,
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
index 5ece0bd..ebe5425 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/NativeHBaseTypesIT.java
@@ -32,17 +32,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -67,7 +68,6 @@ import org.junit.Test;
public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
- @SuppressWarnings("deprecation")
private String initTableValues() throws Exception {
final String tableName = SchemaUtil.getTableName(generateUniqueName(), generateUniqueName());
final byte[] tableBytes = tableName.getBytes();
@@ -75,11 +75,10 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
final byte[][] splits = new byte[][] {Bytes.toBytes(20), Bytes.toBytes(30)};
Admin admin = driver.getConnectionQueryServices(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).getAdmin();
try {
- HTableDescriptor descriptor = new HTableDescriptor(tableBytes);
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
- columnDescriptor.setKeepDeletedCells(true);
- descriptor.addFamily(columnDescriptor);
- admin.createTable(descriptor, splits);
+ admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf(tableBytes))
+ .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(familyName)
+ .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .build(), splits);
} finally {
admin.close();
}
@@ -149,7 +148,7 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
" \"1\".uint_col unsigned_int," +
" \"1\".ulong_col unsigned_long" +
" CONSTRAINT pk PRIMARY KEY (uint_key, ulong_key, string_key))\n" +
- HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + DataBlockEncoding.NONE + "'";
+ ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING + "='" + DataBlockEncoding.NONE + "'";
try (Connection conn = DriverManager.getConnection(url)) {
conn.createStatement().execute(ddl);
@@ -162,7 +161,6 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
public void testRangeQuery1() throws Exception {
String tableName = initTableValues();
String query = "SELECT uint_key, ulong_key, string_key FROM " + tableName + " WHERE uint_key > 20 and ulong_key >= 400";
- Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl());
try {
PreparedStatement statement = conn.prepareStatement(query);
@@ -181,7 +179,6 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
public void testRangeQuery2() throws Exception {
String tableName = initTableValues();
String query = "SELECT uint_key, ulong_key, string_key FROM " + tableName + " WHERE uint_key > 20 and uint_key < 40";
- Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl());
try {
PreparedStatement statement = conn.prepareStatement(query);
@@ -268,7 +265,6 @@ public class NativeHBaseTypesIT extends ParallelStatsDisabledIT {
}
}
- @SuppressWarnings("deprecation")
@Test
public void testNegativeCompareNegativeValue() throws Exception {
String tableName = initTableValues();
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
index c65ca63..63b7c36 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
@@ -44,13 +44,15 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
@@ -83,7 +85,7 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
+ " a.col1 integer,\n" + " b.col2 bigint,\n" + " b.col3 decimal,\n"
+ " b.col4 decimal(5),\n" + " b.col5 decimal(6,3))\n" + " a."
+ HConstants.VERSIONS + "=" + 1 + "," + "a."
- + HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + DataBlockEncoding.NONE
+ + ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING + "='" + DataBlockEncoding.NONE
+ "'";
if (extraProps != null && extraProps.length() > 0) {
ddl += "," + extraProps;
@@ -716,32 +718,30 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
}
- @SuppressWarnings("deprecation")
- HTableDescriptor descriptor = new HTableDescriptor(htableName);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(htableName));
for (byte[] familyName : familyNames) {
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
- descriptor.addFamily(columnDescriptor);
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName));
}
- admin.createTable(descriptor);
+ admin.createTable(builder.build());
createMDTestTable(pconn, tableName,
- "a." + HColumnDescriptor.KEEP_DELETED_CELLS + "=" + Boolean.TRUE);
+ "a." + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=" + Boolean.TRUE);
- descriptor = admin.getTableDescriptor(TableName.valueOf(htableName));
+ TableDescriptor descriptor = admin.getDescriptor(TableName.valueOf(htableName));
assertEquals(3, descriptor.getColumnFamilies().length);
- HColumnDescriptor cdA = descriptor.getFamily(cfA);
- assertNotEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCells());
+ ColumnFamilyDescriptor cdA = descriptor.getColumnFamily(cfA);
+ assertNotEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCells());
assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overriden using
// WITH
assertEquals(1, cdA.getMaxVersions());// Overriden using WITH
- HColumnDescriptor cdB = descriptor.getFamily(cfB);
+ ColumnFamilyDescriptor cdB = descriptor.getColumnFamily(cfB);
// Allow KEEP_DELETED_CELLS to be false for VIEW
- assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCells());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCells());
assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the
// original value.
// CF c should stay the same since it's not a Phoenix cf.
- HColumnDescriptor cdC = descriptor.getFamily(cfC);
+ ColumnFamilyDescriptor cdC = descriptor.getColumnFamily(cfC);
assertNotNull("Column family not found", cdC);
- assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCells());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCells());
assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
@@ -794,12 +794,11 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
}
- HTableDescriptor descriptor = new HTableDescriptor(htableName);
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(htableName));
for (byte[] familyName : familyNames) {
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
- descriptor.addFamily(columnDescriptor);
+ builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName));
}
- admin.createTable(descriptor);
+ admin.createTable(builder.build());
}
String createStmt =
"create view " + generateUniqueName() + " (id char(1) not null primary key,\n"
@@ -866,7 +865,7 @@ public class QueryDatabaseMetaDataIT extends ParallelStatsDisabledIT {
Table htable =
pconn.getQueryServices()
- .getTable(TableName.valueOf(SchemaUtil.getTableNameAsBytes(schemaName, tableName)));
+ .getTable(SchemaUtil.getTableNameAsBytes(schemaName, tableName));
Put put = new Put(Bytes.toBytes("0"));
put.addColumn(cfB, Bytes.toBytes("COL1"), PInteger.INSTANCE.toBytes(1));
put.addColumn(cfC, Bytes.toBytes("COL2"), PLong.INSTANCE.toBytes(2));
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3ec80d3/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
index d785063..fc8c474 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SetPropertyIT.java
@@ -17,7 +17,6 @@
*/
package org.apache.phoenix.end2end;
-import static org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -29,11 +28,13 @@ import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.PTable;
@@ -95,7 +96,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " SET REPLICATION_SCOPE=1";
conn1.createStatement().execute(ddl);
try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -118,10 +119,10 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " SET COMPACTION_ENABLED=FALSE";
conn1.createStatement().execute(ddl);
try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
assertEquals(1, tableDesc.getColumnFamilies().length);
assertEquals("0", tableDesc.getColumnFamilies()[0].getNameAsString());
- assertEquals(Boolean.toString(false), tableDesc.getValue(HTableDescriptor.COMPACTION_ENABLED));
+ assertEquals(Boolean.toString(false), tableDesc.getValue(TableDescriptorBuilder.COMPACTION_ENABLED));
}
}
@@ -140,8 +141,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " SET COMPACTION_ENABLED = FALSE, REPLICATION_SCOPE = 1";
conn1.createStatement().execute(ddl);
try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(1, columnFamilies[0].getScope());
@@ -169,8 +170,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(3, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -188,7 +189,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
assertEquals(10, columnFamilies[2].getMaxVersions());
assertEquals(KeepDeletedCells.FALSE, columnFamilies[2].getKeepDeletedCells());
- assertEquals(Boolean.toString(false), tableDesc.getValue(HTableDescriptor.COMPACTION_ENABLED));
+ assertEquals(Boolean.toString(false), tableDesc.getValue(TableDescriptorBuilder.COMPACTION_ENABLED));
}
}
@@ -388,16 +389,16 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
assertImmutableRows(conn, dataTableFullName, true);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("CF", columnFamilies[0].getNameAsString());
assertEquals(1, columnFamilies[0].getScope());
assertEquals(1000, columnFamilies[0].getTimeToLive());
assertEquals("XYZ", columnFamilies[1].getNameAsString());
- assertEquals(DEFAULT_REPLICATION_SCOPE, columnFamilies[1].getScope());
+ assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_REPLICATION_SCOPE, columnFamilies[1].getScope());
assertEquals(1000, columnFamilies[1].getTimeToLive());
- assertEquals(Boolean.toString(false), tableDesc.getValue(HTableDescriptor.COMPACTION_ENABLED));
+ assertEquals(Boolean.toString(false), tableDesc.getValue(TableDescriptorBuilder.COMPACTION_ENABLED));
}
}
@@ -419,7 +420,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(
"ALTER TABLE " + dataTableFullName + " ADD CF.col3 integer CF.IN_MEMORY=true");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -448,7 +449,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
+ dataTableFullName
+ " ADD col4 integer, CF1.col5 integer, CF2.col6 integer IN_MEMORY=true, CF1.REPLICATION_SCOPE=1, CF2.IN_MEMORY=false ");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(3, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -482,7 +483,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
+ dataTableFullName
+ " ADD col4 integer, CF1.col5 integer, CF2.col6 integer IN_MEMORY=true, CF1.REPLICATION_SCOPE=1, CF2.IN_MEMORY=false, XYZ.REPLICATION_SCOPE=1 ");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(3, columnFamilies.length);
assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -539,7 +540,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
+ dataTableFullName
+ " ADD col4 integer, CF1.col5 integer, CF2.col6 integer, CF3.col7 integer CF1.REPLICATION_SCOPE=1, CF1.IN_MEMORY=false, IN_MEMORY=true ");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(4, columnFamilies.length);
assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -575,7 +576,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement()
.execute("ALTER TABLE " + dataTableFullName + " ADD XYZ.col5 integer IN_MEMORY=true ");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("CF1", columnFamilies[0].getNameAsString());
@@ -602,7 +603,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD col2 integer IN_MEMORY=true");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName))
+ ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName))
.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -632,9 +633,9 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
assertTrue(tableDesc.isCompactionEnabled());
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(5, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertFalse(columnFamilies[0].isInMemory());
@@ -671,9 +672,9 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
assertTrue(tableDesc.isCompactionEnabled());
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(3, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertFalse(columnFamilies[0].isInMemory());
@@ -748,8 +749,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
+ " ) " + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
conn.createStatement().execute(ddl);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals("XYZ", columnFamilies[0].getNameAsString());
assertEquals(86400, columnFamilies[0].getTimeToLive());
@@ -758,8 +759,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(30, columnFamilies[0].getTimeToLive());
assertEquals("XYZ", columnFamilies[0].getNameAsString());
@@ -786,8 +787,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals("XYZ", columnFamilies[0].getNameAsString());
@@ -814,8 +815,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals("0", columnFamilies[0].getNameAsString());
@@ -842,8 +843,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals("XYZ", columnFamilies[0].getNameAsString());
@@ -870,8 +871,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
@@ -900,8 +901,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
@@ -915,8 +916,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
@@ -933,8 +934,8 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
conn.createStatement().execute(ddl);
conn.commit();
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
- HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
+ ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
@@ -970,7 +971,7 @@ public abstract class SetPropertyIT extends ParallelStatsDisabledIT {
ddl = "ALTER TABLE " + dataTableFullName + " SET UNKNOWN_PROP='ABC'";
conn.createStatement().execute(ddl);
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
- HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
+ TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
assertEquals("ABC", tableDesc.getValue("UNKNOWN_PROP"));
}
} finally {