You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by en...@apache.org on 2014/09/30 03:40:02 UTC
[3/3] git commit: HBASE-12042 Replace internal uses of
HTable(Configuration, String) with HTable(Configuration,
TableName) (Solomon Duskis)
HBASE-12042 Replace internal uses of HTable(Configuration, String) with HTable(Configuration, TableName) (Solomon Duskis)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6189b52f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6189b52f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6189b52f
Branch: refs/heads/master
Commit: 6189b52fb0cb6a991ac4837f4e76b83b788fa904
Parents: 683f3b3
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Sep 29 18:39:44 2014 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Sep 29 18:39:44 2014 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/MetaTableAccessor.java | 13 ++
.../org/apache/hadoop/hbase/client/Admin.java | 2 +-
.../example/TestRowCountEndpoint.java | 3 +-
.../actions/RestartRsHoldingTableAction.java | 3 +-
.../mapreduce/IntegrationTestImportTsv.java | 16 +-
.../test/IntegrationTestLoadAndVerify.java | 2 +-
.../hbase/mapreduce/HRegionPartitioner.java | 6 +-
.../apache/hadoop/hbase/mapreduce/Import.java | 7 +-
.../hadoop/hbase/mapreduce/ImportTsv.java | 11 +-
.../hbase/mapreduce/TableInputFormat.java | 3 +-
.../hbase/mapreduce/TableOutputFormat.java | 3 +-
.../hadoop/hbase/mapreduce/WALPlayer.java | 2 +-
.../replication/VerifyReplication.java | 3 +-
.../org/apache/hadoop/hbase/tool/Canary.java | 11 +-
.../hadoop/hbase/HBaseTestingUtility.java | 18 +-
.../hadoop/hbase/TestFullLogReconstruction.java | 2 +-
.../hadoop/hbase/TestGlobalMemStoreSize.java | 2 +-
.../hadoop/hbase/TestHBaseTestingUtility.java | 2 +-
.../apache/hadoop/hbase/TestInfoServers.java | 7 +-
.../apache/hadoop/hbase/TestMultiVersions.java | 4 +-
.../org/apache/hadoop/hbase/TestZooKeeper.java | 4 +-
.../apache/hadoop/hbase/client/TestAdmin.java | 43 +++--
.../client/TestClientScannerRPCTimeout.java | 2 +-
.../hadoop/hbase/client/TestFromClientSide.java | 135 +++++++-------
.../hbase/client/TestFromClientSide3.java | 10 +-
.../hadoop/hbase/client/TestMultiParallel.java | 7 +-
.../hadoop/hbase/client/TestScannerTimeout.java | 3 +-
.../client/TestScannersFromClientSide.java | 5 +-
.../client/TestSnapshotCloneIndependence.java | 2 +-
.../hbase/client/TestTimestampsFilter.java | 6 +-
.../hbase/coprocessor/TestHTableWrapper.java | 6 +-
.../coprocessor/TestOpenTableInCoprocessor.java | 2 +-
.../hbase/filter/TestColumnRangeFilter.java | 2 +-
.../TestFuzzyRowAndColumnRangeFilter.java | 3 +-
.../hadoop/hbase/fs/TestBlockReorder.java | 3 +-
.../hbase/io/encoding/TestChangingEncoding.java | 10 +-
.../hbase/mapred/TestTableInputFormat.java | 2 +-
.../hadoop/hbase/mapred/TestTableMapReduce.java | 8 +-
.../hbase/mapred/TestTableMapReduceUtil.java | 5 +-
.../hadoop/hbase/mapreduce/TestCopyTable.java | 17 +-
.../hbase/mapreduce/TestHRegionPartitioner.java | 5 +-
.../hbase/mapreduce/TestImportExport.java | 22 +--
.../TestImportTSVWithOperationAttributes.java | 9 +-
.../TestImportTSVWithVisibilityLabels.java | 22 ++-
.../hadoop/hbase/mapreduce/TestImportTsv.java | 13 +-
.../TestLoadIncrementalHFilesSplitRecovery.java | 54 +++---
.../mapreduce/TestMultiTableInputFormat.java | 4 +-
.../mapreduce/TestMultithreadedTableMapper.java | 10 +-
.../hadoop/hbase/mapreduce/TestRowCounter.java | 4 +-
.../mapreduce/TestTableInputFormatScanBase.java | 3 +-
.../hbase/mapreduce/TestTableMapReduce.java | 2 +-
.../hbase/mapreduce/TestTableMapReduceBase.java | 3 +-
.../hadoop/hbase/mapreduce/TestWALPlayer.java | 8 +-
.../hadoop/hbase/master/TestMasterFailover.java | 2 +-
.../hbase/master/TestMasterTransitions.java | 13 +-
.../hadoop/hbase/master/TestRestartCluster.java | 24 +--
.../hbase/regionserver/TestCompactionState.java | 5 +-
.../TestEncryptionRandomKeying.java | 6 +-
.../regionserver/TestHRegionServerBulkLoad.java | 23 ++-
.../hbase/regionserver/TestRegionReplicas.java | 2 +-
.../regionserver/TestRegionServerMetrics.java | 14 +-
.../regionserver/TestRegionServerNoMaster.java | 6 +-
.../regionserver/TestScannerWithBulkload.java | 11 +-
.../regionserver/TestServerCustomProtocol.java | 3 +-
.../TestSplitTransactionOnCluster.java | 69 +++----
.../regionserver/wal/TestLogRollAbort.java | 8 +-
.../regionserver/wal/TestLogRollPeriod.java | 10 +-
.../hbase/regionserver/wal/TestLogRolling.java | 26 ++-
.../regionserver/TestReplicationSink.java | 15 +-
.../TestVisibilityLabelsWithDeletes.java | 182 +++++++++----------
.../TestVisibilityWithCheckAuths.java | 12 +-
.../hadoop/hbase/trace/TestHTraceHooks.java | 3 +-
.../apache/hadoop/hbase/util/TestHBaseFsck.java | 6 +-
.../hbase/util/TestHBaseFsckEncryption.java | 6 +-
.../hadoop/hbase/util/TestRegionSplitter.java | 24 ++-
75 files changed, 521 insertions(+), 503 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 5c01d93..45a561a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -872,8 +872,21 @@ public class MetaTableAccessor {
* @return Count of regions in table <code>tableName</code>
* @throws IOException
*/
+ @Deprecated
public static int getRegionCount(final Configuration c, final String tableName)
throws IOException {
+ return getRegionCount(c, TableName.valueOf(tableName));
+ }
+
+ /**
+ * Count regions in <code>hbase:meta</code> for passed table.
+ * @param c Configuration object
+ * @param tableName table name to count regions for
+ * @return Count of regions in table <code>tableName</code>
+ * @throws IOException
+ */
+ public static int getRegionCount(final Configuration c, final TableName tableName)
+ throws IOException {
HTable t = new HTable(c, tableName);
try {
return t.getRegionLocations().size();
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 722b5c2..5f2e515 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -128,7 +128,7 @@ public interface Admin extends Abortable, Closeable {
/**
* Method for getting the tableDescriptor
*
- * @param tableName as a byte []
+ * @param tableName as a {@link TableName}
* @return the tableDescriptor
* @throws org.apache.hadoop.hbase.TableNotFoundException
* @throws IOException if a remote or network exception occurs
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
index 98dca59..b28c247 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.coprocessor.example;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
@@ -60,7 +61,7 @@ public class TestRowCountEndpoint {
RowCountEndpoint.class.getName());
TEST_UTIL.startMiniCluster();
- TEST_UTIL.createTable(TEST_TABLE, TEST_FAMILY);
+ TEST_UTIL.createTable(TableName.valueOf(TEST_TABLE), new byte[][]{TEST_FAMILY});
}
// @Ignore @AfterClass
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java
index b6c1f2e..ffeab0e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java
@@ -24,6 +24,7 @@ import java.util.Collection;
import org.apache.commons.lang.math.RandomUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
/**
@@ -43,7 +44,7 @@ public class RestartRsHoldingTableAction extends RestartActionBaseAction {
HTable table = null;
try {
Configuration conf = context.getHBaseIntegrationTestingUtility().getConfiguration();
- table = new HTable(conf, tableName);
+ table = new HTable(conf, TableName.valueOf(tableName));
} catch (IOException e) {
LOG.debug("Error creating HTable used to get list of region locations.", e);
return;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index b13d670..4b07f8f 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -129,10 +130,10 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
* Verify the data described by <code>simple_tsv</code> matches
* <code>simple_expected</code>.
*/
- protected void doLoadIncrementalHFiles(Path hfiles, String tableName)
+ protected void doLoadIncrementalHFiles(Path hfiles, TableName tableName)
throws Exception {
- String[] args = { hfiles.toString(), tableName };
+ String[] args = { hfiles.toString(), tableName.getNameAsString() };
LOG.info(format("Running LoadIncrementalHFiles with args: %s", Arrays.asList(args)));
assertEquals("Loading HFiles failed.",
0, ToolRunner.run(new LoadIncrementalHFiles(new Configuration(getConf())), args));
@@ -181,9 +182,10 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
@Test
public void testGenerateAndLoad() throws Exception {
LOG.info("Running test testGenerateAndLoad.");
- String table = NAME + "-" + UUID.randomUUID();
+ TableName table = TableName.valueOf(NAME + "-" + UUID.randomUUID());
String cf = "d";
- Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
+ Path hfiles = new Path(
+ util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");
String[] args = {
format("-D%s=%s", ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles),
@@ -192,11 +194,11 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
// configure the test harness to NOT delete the HFiles after they're
// generated. We need those for doLoadIncrementalHFiles
format("-D%s=false", TestImportTsv.DELETE_AFTER_LOAD_CONF),
- table
+ table.getNameAsString()
};
// run the job, complete the load.
- util.createTable(table, cf);
+ util.createTable(table, new String[]{cf});
Tool t = TestImportTsv.doMROnTableTest(util, cf, simple_tsv, args);
doLoadIncrementalHFiles(hfiles, table);
@@ -205,7 +207,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
// clean up after ourselves.
util.deleteTable(table);
- util.cleanupDataTestDirOnTestFS(table);
+ util.cleanupDataTestDirOnTestFS(table.getNameAsString());
LOG.info("testGenerateAndLoad completed successfully.");
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 44f9e4a..5c9a9ad 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -179,7 +179,7 @@ public void cleanUpCluster() throws Exception {
recordsToWrite = conf.getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT);
String tableName = conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT);
numBackReferencesPerRow = conf.getInt(NUM_BACKREFS_KEY, NUM_BACKREFS_DEFAULT);
- table = new HTable(conf, tableName);
+ table = new HTable(conf, TableName.valueOf(tableName));
table.setWriteBufferSize(4*1024*1024);
table.setAutoFlush(false, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index d572839..f88d959 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -122,8 +123,9 @@ implements Configurable {
public void setConf(Configuration configuration) {
this.conf = HBaseConfiguration.create(configuration);
try {
- this.table = new HTable(this.conf,
- configuration.get(TableOutputFormat.OUTPUT_TABLE));
+ TableName tableName = TableName.valueOf(configuration
+ .get(TableOutputFormat.OUTPUT_TABLE));
+ this.table = new HTable(this.conf, tableName);
} catch (IOException e) {
LOG.error(e);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index dbac568..c2453fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -401,8 +402,8 @@ public class Import extends Configured implements Tool {
*/
public static Job createSubmittableJob(Configuration conf, String[] args)
throws IOException {
- String tableName = args[0];
- conf.set(TABLE_NAME, tableName);
+ TableName tableName = TableName.valueOf(args[0]);
+ conf.set(TABLE_NAME, tableName.getNameAsString());
Path inputDir = new Path(args[1]);
Job job = Job.getInstance(conf, conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
job.setJarByClass(Importer.class);
@@ -435,7 +436,7 @@ public class Import extends Configured implements Tool {
// No reducers. Just write straight to table. Call initTableReducerJob
// because it sets up the TableOutputFormat.
job.setMapperClass(Importer.class);
- TableMapReduceUtil.initTableReducerJob(tableName, null, job);
+ TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
job.setNumReduceTasks(0);
}
return job;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index 12a2d4c..a953c3e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -414,9 +414,9 @@ public class ImportTsv extends Configured implements Tool {
Class mapperClass = mapperClassName != null ?
Class.forName(mapperClassName) : DEFAULT_MAPPER;
- String tableName = args[0];
+ TableName tableName = TableName.valueOf(args[0]);
Path inputDir = new Path(args[1]);
- String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName);
+ String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName.getNameAsString());
Job job = Job.getInstance(conf, jobName);
job.setJarByClass(mapperClass);
FileInputFormat.setInputPaths(job, inputDir);
@@ -460,7 +460,8 @@ public class ImportTsv extends Configured implements Tool {
}
// No reducers. Just write straight to table. Call initTableReducerJob
// to set up the TableOutputFormat.
- TableMapReduceUtil.initTableReducerJob(tableName, null, job);
+ TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null,
+ job);
job.setNumReduceTasks(0);
}
@@ -470,9 +471,9 @@ public class ImportTsv extends Configured implements Tool {
return job;
}
- private static void createTable(Admin admin, String tableName, String[] columns)
+ private static void createTable(Admin admin, TableName tableName, String[] columns)
throws IOException {
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
+ HTableDescriptor htd = new HTableDescriptor(tableName);
Set<String> cfSet = new HashSet<String>();
for (String aColumn : columns) {
if (TsvParser.ROWKEY_COLUMN_SPEC.equals(aColumn)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index 40fb21e..c7fa29e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
@@ -96,7 +97,7 @@ implements Configurable {
@Override
public void setConf(Configuration configuration) {
this.conf = configuration;
- String tableName = conf.get(INPUT_TABLE);
+ TableName tableName = TableName.valueOf(conf.get(INPUT_TABLE));
try {
setHTable(new HTable(new Configuration(conf), tableName));
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
index 454bc28..52b8e45 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
@@ -204,7 +205,7 @@ implements Configurable {
if (zkClientPort != 0) {
this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zkClientPort);
}
- this.table = new HTable(this.conf, tableName);
+ this.table = new HTable(this.conf, TableName.valueOf(tableName));
this.table.setAutoFlush(false, true);
LOG.info("Created table instance for " + tableName);
} catch(IOException e) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 290cf8c..cf9dc56 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -247,7 +247,7 @@ public class WALPlayer extends Configured implements Tool {
if (tables.length != 1) {
throw new IOException("Exactly one table must be specified for the bulk export option");
}
- HTable table = new HTable(conf, tables[0]);
+ HTable table = new HTable(conf, TableName.valueOf(tables[0]));
job.setMapperClass(HLogKeyValueMapper.class);
job.setReducerClass(KeyValueSortReducer.class);
Path outputDir = new Path(hfileOutPath);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 8f6b217..f425ba8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -123,7 +123,8 @@ public class VerifyReplication extends Configured implements Tool {
Configuration peerConf = HBaseConfiguration.create(conf);
ZKUtil.applyClusterKeyToConf(peerConf, zkClusterKey);
- Table replicatedTable = new HTable(peerConf, conf.get(NAME + ".tableName"));
+ TableName tableName = TableName.valueOf(conf.get(NAME + ".tableName"));
+ Table replicatedTable = new HTable(peerConf, tableName);
scan.setStartRow(value.getRow());
replicatedScanner = replicatedTable.getScanner(scan);
return null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 9b11291..d5cdb39 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -606,7 +606,7 @@ public final class Canary implements Tool {
private void monitorRegionServers(Map<String, List<HRegionInfo>> rsAndRMap) {
String serverName = null;
- String tableName = null;
+ TableName tableName = null;
HRegionInfo region = null;
Table table = null;
Get get = null;
@@ -620,7 +620,7 @@ public final class Canary implements Tool {
// always get the first region
region = entry.getValue().get(0);
try {
- tableName = region.getTable().getNameAsString();
+ tableName = region.getTable();
table = new HTable(this.admin.getConfiguration(), tableName);
startKey = region.getStartKey();
// Can't do a get on empty start row so do a Scan of first element if any instead.
@@ -638,17 +638,18 @@ public final class Canary implements Tool {
s.close();
stopWatch.stop();
}
- this.getSink().publishReadTiming(tableName, serverName, stopWatch.getTime());
+ this.getSink().publishReadTiming(tableName.getNameAsString(),
+ serverName, stopWatch.getTime());
} catch (TableNotFoundException tnfe) {
// This is ignored because it doesn't imply that the regionserver is dead
} catch (TableNotEnabledException tnee) {
// This is considered a success since we got a response.
LOG.debug("The targeted table was disabled. Assuming success.");
} catch (DoNotRetryIOException dnrioe) {
- this.getSink().publishReadFailure(tableName, serverName);
+ this.getSink().publishReadFailure(tableName.getNameAsString(), serverName);
LOG.error(dnrioe);
} catch (IOException e) {
- this.getSink().publishReadFailure(tableName, serverName);
+ this.getSink().publishReadFailure(tableName.getNameAsString(), serverName);
LOG.error(e);
this.errorCode = ERROR_EXIT_CODE;
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 70ef0ba..de530aa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1071,9 +1071,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
* @return An HTable instance for the created table.
* @throws IOException
*/
- public HTable createTable(String tableName, String family)
+ public HTable createTable(TableName tableName, String family)
throws IOException{
- return createTable(TableName.valueOf(tableName), new String[]{family});
+ return createTable(tableName, new String[]{family});
}
/**
@@ -2226,20 +2226,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
* @throws IOException
* @throws InterruptedException
*/
- public HRegionServer getRSForFirstRegionInTable(byte[] tableName)
- throws IOException, InterruptedException {
- return getRSForFirstRegionInTable(TableName.valueOf(tableName));
- }
- /**
- * Tool to get the reference to the region server object that holds the
- * region of the specified user table.
- * It first searches for the meta rows that contain the region of the
- * specified table, then gets the index of that RS, and finally retrieves
- * the RS's reference.
- * @param tableName user table to lookup in hbase:meta
- * @return region server that holds it, null if the row doesn't exist
- * @throws IOException
- */
public HRegionServer getRSForFirstRegionInTable(TableName tableName)
throws IOException, InterruptedException {
List<byte[]> metaRows = getMetaTableRows(tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
index f2ac593..59ddfd7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
@@ -39,7 +39,7 @@ public class TestFullLogReconstruction {
private final static HBaseTestingUtility
TEST_UTIL = new HBaseTestingUtility();
- private final static byte[] TABLE_NAME = Bytes.toBytes("tabletest");
+ private final static TableName TABLE_NAME = TableName.valueOf("tabletest");
private final static byte[] FAMILY = Bytes.toBytes("family");
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
index 4db8f30..7be5074 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
@@ -73,7 +73,7 @@ public class TestGlobalMemStoreSize {
byte [] table = Bytes.toBytes("TestGlobalMemStoreSize");
byte [] family = Bytes.toBytes("family");
LOG.info("Creating table with " + regionNum + " regions");
- HTable ht = TEST_UTIL.createTable(table, family);
+ HTable ht = TEST_UTIL.createTable(TableName.valueOf(table), family);
int numRegions = TEST_UTIL.createMultiRegions(conf, ht, family,
regionNum);
assertEquals(regionNum,numRegions);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index 4eac224..abbcb4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -83,7 +83,7 @@ public class TestHBaseTestingUtility {
htu2.startMiniCluster();
htu3.startMiniCluster();
- final byte[] TABLE_NAME = Bytes.toBytes("test");
+ final TableName TABLE_NAME = TableName.valueOf("test");
final byte[] FAM_NAME = Bytes.toBytes("fam");
final byte[] ROW = Bytes.toBytes("row");
final byte[] QUAL_NAME = Bytes.toBytes("qual");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
index ec9cc76..62b00d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
@@ -99,17 +99,16 @@ public class TestInfoServers {
@Test
public void testMasterServerReadOnly() throws Exception {
- String sTableName = "testMasterServerReadOnly";
- byte[] tableName = Bytes.toBytes(sTableName);
+ TableName tableName = TableName.valueOf("testMasterServerReadOnly");
byte[] cf = Bytes.toBytes("d");
UTIL.createTable(tableName, cf);
new HTable(UTIL.getConfiguration(), tableName).close();
int port = UTIL.getHBaseCluster().getMaster().getInfoServer().getPort();
assertDoesNotContainContent(
- new URL("http://localhost:" + port + "/table.jsp?name=" + sTableName + "&action=split&key="),
+ new URL("http://localhost:" + port + "/table.jsp?name=" + tableName + "&action=split&key="),
"Table action request accepted");
assertDoesNotContainContent(
- new URL("http://localhost:" + port + "/table.jsp?name=" + sTableName),
+ new URL("http://localhost:" + port + "/table.jsp?name=" + tableName),
"Actions:");
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 8333954..6c70384 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -144,7 +144,7 @@ public class TestMultiVersions {
this.admin.createTable(desc);
Put put = new Put(row, timestamp1);
put.add(contents, contents, value1);
- Table table = new HTable(UTIL.getConfiguration(), tableName);
+ Table table = new HTable(UTIL.getConfiguration(), desc.getTableName());
table.put(put);
// Shut down and restart the HBase cluster
table.close();
@@ -153,7 +153,7 @@ public class TestMultiVersions {
UTIL.startMiniHBaseCluster(1, NUM_SLAVES);
// Make a new connection. Use new Configuration instance because old one
// is tied to an HConnection that has since gone stale.
- table = new HTable(new Configuration(UTIL.getConfiguration()), tableName);
+ table = new HTable(new Configuration(UTIL.getConfiguration()), desc.getTableName());
// Overwrite previous value
put = new Put(row, timestamp2);
put.add(contents, contents, value2);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index a0a810e..1944b61 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -261,7 +261,7 @@ public class TestZooKeeper {
}
Table table =
- new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName);
+ new HTable(new Configuration(TEST_UTIL.getConfiguration()), desc.getTableName());
Put put = new Put(Bytes.toBytes("testrow"));
put.add(Bytes.toBytes("fam"),
Bytes.toBytes("col"), Bytes.toBytes("testdata"));
@@ -543,7 +543,7 @@ public class TestZooKeeper {
htd.addFamily(hcd);
admin.createTable(htd, SPLIT_KEYS);
TEST_UTIL.waitUntilNoRegionsInTransition(60000);
- table = new HTable(TEST_UTIL.getConfiguration(), tableName);
+ table = new HTable(TEST_UTIL.getConfiguration(), htd.getTableName());
Put p;
int numberOfPuts;
for (numberOfPuts = 0; numberOfPuts < 6; numberOfPuts++) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
index 937ebe7..7f93780 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
@@ -300,8 +300,8 @@ public class TestAdmin {
final byte [] row = Bytes.toBytes("row");
final byte [] qualifier = Bytes.toBytes("qualifier");
final byte [] value = Bytes.toBytes("value");
- final byte [] table1 = Bytes.toBytes("testDisableAndEnableTable1");
- final byte [] table2 = Bytes.toBytes("testDisableAndEnableTable2");
+ final TableName table1 = TableName.valueOf("testDisableAndEnableTable1");
+ final TableName table2 = TableName.valueOf("testDisableAndEnableTable2");
Table ht1 = TEST_UTIL.createTable(table1, HConstants.CATALOG_FAMILY);
Table ht2 = TEST_UTIL.createTable(table2, HConstants.CATALOG_FAMILY);
Put put = new Put(row);
@@ -350,8 +350,7 @@ public class TestAdmin {
public void testCreateTable() throws IOException {
HTableDescriptor [] tables = admin.listTables();
int numTables = tables.length;
- TEST_UTIL.createTable(Bytes.toBytes("testCreateTable"),
- HConstants.CATALOG_FAMILY).close();
+ TEST_UTIL.createTable(TableName.valueOf("testCreateTable"), HConstants.CATALOG_FAMILY).close();
tables = this.admin.listTables();
assertEquals(numTables + 1, tables.length);
assertTrue("Table must be enabled.", TEST_UTIL.getHBaseCluster()
@@ -411,7 +410,7 @@ public class TestAdmin {
htd.addFamily(fam2);
htd.addFamily(fam3);
this.admin.createTable(htd);
- Table table = new HTable(TEST_UTIL.getConfiguration(), "myTestTable");
+ Table table = new HTable(TEST_UTIL.getConfiguration(), htd.getTableName());
HTableDescriptor confirmedHtd = table.getTableDescriptor();
assertEquals(htd.compareTo(confirmedHtd), 0);
table.close();
@@ -1227,7 +1226,7 @@ public class TestAdmin {
admin.createTable(htd1);
admin.createTable(htd2);
// Before fix, below would fail throwing a NoServerForRegionException.
- new HTable(TEST_UTIL.getConfiguration(), name).close();
+ new HTable(TEST_UTIL.getConfiguration(), htd2.getTableName()).close();
}
/***
@@ -1264,7 +1263,7 @@ public class TestAdmin {
*/
@Test (timeout=300000)
public void testReadOnlyTable() throws Exception {
- byte [] name = Bytes.toBytes("testReadOnlyTable");
+ TableName name = TableName.valueOf("testReadOnlyTable");
Table table = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY);
byte[] value = Bytes.toBytes("somedata");
// This used to use an empty row... That must have been a bug
@@ -1310,7 +1309,7 @@ public class TestAdmin {
*/
@Test (expected=TableExistsException.class, timeout=300000)
public void testTableExistsExceptionWithATable() throws IOException {
- final byte [] name = Bytes.toBytes("testTableExistsExceptionWithATable");
+ final TableName name = TableName.valueOf("testTableExistsExceptionWithATable");
TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY).close();
TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY);
}
@@ -1348,8 +1347,9 @@ public class TestAdmin {
*/
@Test (expected=TableNotFoundException.class, timeout=300000)
public void testTableNotFoundExceptionWithoutAnyTables() throws IOException {
- Table ht =
- new HTable(TEST_UTIL.getConfiguration(),"testTableNotFoundExceptionWithoutAnyTables");
+ TableName tableName = TableName
+ .valueOf("testTableNotFoundExceptionWithoutAnyTables");
+ Table ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
ht.get(new Get("e".getBytes()));
}
@@ -1390,7 +1390,7 @@ public class TestAdmin {
createTableWithDefaultConf(TABLENAME);
HRegionInfo info = null;
- HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLENAME);
+ HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TableName.valueOf(TABLENAME));
List<HRegionInfo> onlineRegions = ProtobufUtil.getOnlineRegions(rs.getRSRpcServices());
for (HRegionInfo regionInfo : onlineRegions) {
if (!regionInfo.isMetaTable()) {
@@ -1448,7 +1448,7 @@ public class TestAdmin {
byte[] TABLENAME = Bytes.toBytes("TestHBACloseRegion3");
createTableWithDefaultConf(TABLENAME);
- HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLENAME);
+ HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TableName.valueOf(TABLENAME));
try {
List<HRegionInfo> onlineRegions = ProtobufUtil.getOnlineRegions(rs.getRSRpcServices());
@@ -1472,7 +1472,7 @@ public class TestAdmin {
byte[] TABLENAME = Bytes.toBytes("TestHBACloseRegionWhenServerNameIsEmpty");
createTableWithDefaultConf(TABLENAME);
- HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLENAME);
+ HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TableName.valueOf(TABLENAME));
try {
List<HRegionInfo> onlineRegions = ProtobufUtil.getOnlineRegions(rs.getRSRpcServices());
@@ -1496,7 +1496,7 @@ public class TestAdmin {
createTableWithDefaultConf(TABLENAME);
HRegionInfo info = null;
- HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLENAME);
+ HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TableName.valueOf(TABLENAME));
List<HRegionInfo> onlineRegions = ProtobufUtil.getOnlineRegions(rs.getRSRpcServices());
for (HRegionInfo regionInfo : onlineRegions) {
@@ -1579,7 +1579,7 @@ public class TestAdmin {
v.append(className);
}
byte[] value = Bytes.toBytes(v.toString());
- HRegionServer regionServer = startAndWriteData("TestLogRolling", value);
+ HRegionServer regionServer = startAndWriteData(TableName.valueOf("TestLogRolling"), value);
LOG.info("after writing there are "
+ HLogUtilsForTests.getNumRolledLogFiles(regionServer.getWAL()) + " log files");
@@ -1659,19 +1659,19 @@ public class TestAdmin {
"hbase.regionserver.hlog.lowreplication.rolllimit", 3);
}
- private HRegionServer startAndWriteData(String tableName, byte[] value)
+ private HRegionServer startAndWriteData(TableName tableName, byte[] value)
throws IOException, InterruptedException {
// When the hbase:meta table can be opened, the region servers are running
new HTable(
TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME).close();
// Create the test table and open it
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+ HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
admin.createTable(desc);
Table table = new HTable(TEST_UTIL.getConfiguration(), tableName);
- HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName));
+ HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(tableName);
for (int i = 1; i <= 256; i++) { // 256 writes should cause 8 log rolls
Put put = new Put(Bytes.toBytes("row" + String.format("%1$04d", i)));
put.add(HConstants.CATALOG_FAMILY, null, value);
@@ -1772,10 +1772,9 @@ public class TestAdmin {
// here because makes use of an internal HBA method (TODO: Fix.).
HBaseAdmin rawAdmin = new HBaseAdmin(TEST_UTIL.getConfiguration());
- final String name = "testGetRegion";
- LOG.info("Started " + name);
- final byte [] nameBytes = Bytes.toBytes(name);
- HTable t = TEST_UTIL.createTable(nameBytes, HConstants.CATALOG_FAMILY);
+ final TableName tableName = TableName.valueOf("testGetRegion");
+ LOG.info("Started " + tableName);
+ HTable t = TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY);
TEST_UTIL.createMultiRegions(t, HConstants.CATALOG_FAMILY);
HRegionLocation regionLocation = t.getRegionLocation("mmm");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index f8f7bb3..f8c093f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -84,7 +84,7 @@ public class TestClientScannerRPCTimeout {
@Test
public void testScannerNextRPCTimesout() throws Exception {
- final byte[] TABLE_NAME = Bytes.toBytes("testScannerNextRPCTimesout");
+ final TableName TABLE_NAME = TableName.valueOf("testScannerNextRPCTimesout");
Table ht = TEST_UTIL.createTable(TABLE_NAME, FAMILY);
byte[] r1 = Bytes.toBytes("row-1");
byte[] r2 = Bytes.toBytes("row-2");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 610c815..aa44647 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -182,7 +182,7 @@ public class TestFromClientSide {
HColumnDescriptor hcd = new HColumnDescriptor(FAMILY)
.setKeepDeletedCells(true).setMaxVersions(3);
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(TABLENAME));
+ HTableDescriptor desc = new HTableDescriptor(TABLENAME);
desc.addFamily(hcd);
TEST_UTIL.getHBaseAdmin().createTable(desc);
Configuration c = TEST_UTIL.getConfiguration();
@@ -385,7 +385,7 @@ public class TestFromClientSide {
*/
@Test
public void testGetConfiguration() throws Exception {
- byte[] TABLE = Bytes.toBytes("testGetConfiguration");
+ TableName TABLE = TableName.valueOf("testGetConfiguration");
byte[][] FAMILIES = new byte[][] { Bytes.toBytes("foo") };
Configuration conf = TEST_UTIL.getConfiguration();
Table table = TEST_UTIL.createTable(TABLE, FAMILIES, conf);
@@ -533,7 +533,7 @@ public class TestFromClientSide {
@Test
public void testFilterAcrossMultipleRegions()
throws IOException, InterruptedException {
- byte [] name = Bytes.toBytes("testFilterAcrossMutlipleRegions");
+ TableName name = TableName.valueOf("testFilterAcrossMutlipleRegions");
HTable t = TEST_UTIL.createTable(name, FAMILY);
int rowCount = TEST_UTIL.loadTable(t, FAMILY, false);
assertRowCount(t, rowCount);
@@ -1208,7 +1208,7 @@ public class TestFromClientSide {
// Null family (should NOT work)
try {
- TEST_UTIL.createTable(TABLE, (byte[])null);
+ TEST_UTIL.createTable(TABLE, new byte[][]{(byte[])null});
fail("Creating a table with a null family passed, should fail");
} catch(Exception e) {}
@@ -1243,7 +1243,7 @@ public class TestFromClientSide {
// Use a new table
byte [] TABLE2 = Bytes.toBytes("testNull2");
- ht = TEST_UTIL.createTable(TABLE2, FAMILY);
+ ht = TEST_UTIL.createTable(TableName.valueOf(TABLE2), FAMILY);
// Empty qualifier, byte[0] instead of null (should work)
try {
@@ -3597,8 +3597,7 @@ public class TestFromClientSide {
@Test
public void testUpdatesWithMajorCompaction() throws Exception {
- String tableName = "testUpdatesWithMajorCompaction";
- byte [] TABLE = Bytes.toBytes(tableName);
+ TableName TABLE = TableName.valueOf("testUpdatesWithMajorCompaction");
Table hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10);
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
@@ -3630,8 +3629,8 @@ public class TestFromClientSide {
assertEquals("BBB", Bytes.toString(navigableMap.get(2L)));
// Trigger a major compaction
- admin.flush(tableName);
- admin.majorCompact(tableName);
+ admin.flush(TABLE);
+ admin.majorCompact(TABLE);
Thread.sleep(6000);
// Update the value at timestamp 1
@@ -3645,8 +3644,8 @@ public class TestFromClientSide {
hTable.put(put);
// Trigger a major compaction
- admin.flush(tableName);
- admin.majorCompact(tableName);
+ admin.flush(TABLE);
+ admin.majorCompact(TABLE);
Thread.sleep(6000);
// Check that the values at timestamp 2 and 1 got updated
@@ -3726,7 +3725,7 @@ public class TestFromClientSide {
@Test
public void testGet_EmptyTable() throws IOException {
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testGet_EmptyTable"), FAMILY);
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_EmptyTable"), FAMILY);
Get get = new Get(ROW);
get.addFamily(FAMILY);
Result r = table.get(get);
@@ -3735,7 +3734,7 @@ public class TestFromClientSide {
@Test
public void testGet_NullQualifier() throws IOException {
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testGet_NullQualifier"), FAMILY);
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NullQualifier"), FAMILY);
Put put = new Put(ROW);
put.add(FAMILY, QUALIFIER, VALUE);
table.put(put);
@@ -3758,7 +3757,7 @@ public class TestFromClientSide {
@Test
public void testGet_NonExistentRow() throws IOException {
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testGet_NonExistentRow"), FAMILY);
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NonExistentRow"), FAMILY);
Put put = new Put(ROW);
put.add(FAMILY, QUALIFIER, VALUE);
table.put(put);
@@ -3822,7 +3821,7 @@ public class TestFromClientSide {
public void testPutNoCF() throws IOException {
final byte[] BAD_FAM = Bytes.toBytes("BAD_CF");
final byte[] VAL = Bytes.toBytes(100);
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testPutNoCF"), new byte[][]{FAMILY});
+ Table table = TEST_UTIL.createTable(Bytes.toBytes("testPutNoCF"), FAMILY);
boolean caughtNSCFE = false;
@@ -4064,10 +4063,10 @@ public class TestFromClientSide {
@Test
public void testListTables() throws IOException, InterruptedException {
- byte [] t1 = Bytes.toBytes("testListTables1");
- byte [] t2 = Bytes.toBytes("testListTables2");
- byte [] t3 = Bytes.toBytes("testListTables3");
- byte [][] tables = new byte[][] { t1, t2, t3 };
+ TableName t1 = TableName.valueOf("testListTables1");
+ TableName t2 = TableName.valueOf("testListTables2");
+ TableName t3 = TableName.valueOf("testListTables3");
+ TableName [] tables = new TableName[] { t1, t2, t3 };
for (int i = 0; i < tables.length; i++) {
TEST_UTIL.createTable(tables[i], FAMILY);
}
@@ -4080,12 +4079,12 @@ public class TestFromClientSide {
for (int i = 0; i < tables.length && i < size; i++) {
boolean found = false;
for (int j = 0; j < ts.length; j++) {
- if (Bytes.equals(ts[j].getTableName().getName(), tables[i])) {
+ if (ts[j].getTableName().equals(tables[i])) {
found = true;
break;
}
}
- assertTrue("Not found: " + Bytes.toString(tables[i]), found);
+ assertTrue("Not found: " + tables[i], found);
}
}
@@ -4096,7 +4095,7 @@ public class TestFromClientSide {
* @return the created HTable object
* @throws IOException
*/
- HTable createUnmangedHConnectionHTable(final byte [] tableName) throws IOException {
+ HTable createUnmangedHConnectionHTable(final TableName tableName) throws IOException {
TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY);
HConnection conn = HConnectionManager.createConnection(TEST_UTIL.getConfiguration());
return (HTable)conn.getTable(tableName);
@@ -4110,7 +4109,7 @@ public class TestFromClientSide {
*/
@Test
public void testUnmanagedHConnection() throws IOException {
- final byte[] tableName = Bytes.toBytes("testUnmanagedHConnection");
+ final TableName tableName = TableName.valueOf("testUnmanagedHConnection");
HTable t = createUnmangedHConnectionHTable(tableName);
HBaseAdmin ha = new HBaseAdmin(t.getConnection());
assertTrue(ha.tableExists(tableName));
@@ -4125,7 +4124,7 @@ public class TestFromClientSide {
*/
@Test
public void testUnmanagedHConnectionReconnect() throws Exception {
- final byte[] tableName = Bytes.toBytes("testUnmanagedHConnectionReconnect");
+ final TableName tableName = TableName.valueOf("testUnmanagedHConnectionReconnect");
HTable t = createUnmangedHConnectionHTable(tableName);
Connection conn = t.getConnection();
HBaseAdmin ha = new HBaseAdmin(conn);
@@ -4150,8 +4149,8 @@ public class TestFromClientSide {
@Test
public void testMiscHTableStuff() throws IOException {
- final byte[] tableAname = Bytes.toBytes("testMiscHTableStuffA");
- final byte[] tableBname = Bytes.toBytes("testMiscHTableStuffB");
+ final TableName tableAname = TableName.valueOf("testMiscHTableStuffA");
+ final TableName tableBname = TableName.valueOf("testMiscHTableStuffB");
final byte[] attrName = Bytes.toBytes("TESTATTR");
final byte[] attrValue = Bytes.toBytes("somevalue");
byte[] value = Bytes.toBytes("value");
@@ -4193,7 +4192,7 @@ public class TestFromClientSide {
// to be reloaded.
// Test user metadata
- HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
+ Admin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
// make a modifiable descriptor
HTableDescriptor desc = new HTableDescriptor(a.getTableDescriptor());
// offline the table
@@ -4211,7 +4210,7 @@ public class TestFromClientSide {
// Test that attribute changes were applied
desc = a.getTableDescriptor();
assertTrue("wrong table descriptor returned",
- Bytes.compareTo(desc.getTableName().getName(), tableAname) == 0);
+ desc.getTableName().equals(tableAname));
// check HTD attribute
value = desc.getValue(attrName);
assertFalse("missing HTD attribute value", value == null);
@@ -4228,7 +4227,7 @@ public class TestFromClientSide {
@Test
public void testGetClosestRowBefore() throws IOException, InterruptedException {
- final byte[] tableAname = Bytes.toBytes("testGetClosestRowBefore");
+ final TableName tableAname = TableName.valueOf("testGetClosestRowBefore");
final byte[] firstRow = Bytes.toBytes("row111");
final byte[] secondRow = Bytes.toBytes("row222");
final byte[] thirdRow = Bytes.toBytes("row333");
@@ -4342,7 +4341,7 @@ public class TestFromClientSide {
@Test
public void testMultiRowMutation() throws Exception {
LOG.info("Starting testMultiRowMutation");
- final byte [] TABLENAME = Bytes.toBytes("testMultiRowMutation");
+ final TableName TABLENAME = TableName.valueOf("testMultiRowMutation");
final byte [] ROW1 = Bytes.toBytes("testRow1");
Table t = TEST_UTIL.createTable(TABLENAME, FAMILY);
@@ -4373,7 +4372,7 @@ public class TestFromClientSide {
@Test
public void testRowMutation() throws Exception {
LOG.info("Starting testRowMutation");
- final byte [] TABLENAME = Bytes.toBytes("testRowMutation");
+ final TableName TABLENAME = TableName.valueOf("testRowMutation");
Table t = TEST_UTIL.createTable(TABLENAME, FAMILY);
byte [][] QUALIFIERS = new byte [][] {
Bytes.toBytes("a"), Bytes.toBytes("b")
@@ -4405,7 +4404,7 @@ public class TestFromClientSide {
@Test
public void testAppend() throws Exception {
LOG.info("Starting testAppend");
- final byte [] TABLENAME = Bytes.toBytes("testAppend");
+ final TableName TABLENAME = TableName.valueOf("testAppend");
Table t = TEST_UTIL.createTable(TABLENAME, FAMILY);
byte[] v1 = Bytes.toBytes("42");
byte[] v2 = Bytes.toBytes("23");
@@ -4456,7 +4455,7 @@ public class TestFromClientSide {
@Test
public void testIncrementingInvalidValue() throws Exception {
LOG.info("Starting testIncrementingInvalidValue");
- final byte [] TABLENAME = Bytes.toBytes("testIncrementingInvalidValue");
+ final TableName TABLENAME = TableName.valueOf("testIncrementingInvalidValue");
Table ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
final byte[] COLUMN = Bytes.toBytes("column");
Put p = new Put(ROW);
@@ -4482,7 +4481,7 @@ public class TestFromClientSide {
@Test
public void testIncrementInvalidArguments() throws Exception {
LOG.info("Starting testIncrementInvalidArguments");
- final byte[] TABLENAME = Bytes.toBytes("testIncrementInvalidArguments");
+ final TableName TABLENAME = TableName.valueOf("testIncrementInvalidArguments");
Table ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
final byte[] COLUMN = Bytes.toBytes("column");
try {
@@ -4537,7 +4536,7 @@ public class TestFromClientSide {
@Test
public void testIncrementOutOfOrder() throws Exception {
LOG.info("Starting testIncrementOutOfOrder");
- final byte [] TABLENAME = Bytes.toBytes("testIncrementOutOfOrder");
+ final TableName TABLENAME = TableName.valueOf("testIncrementOutOfOrder");
Table ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
byte [][] QUALIFIERS = new byte [][] {
@@ -4577,7 +4576,7 @@ public class TestFromClientSide {
@Test
public void testIncrement() throws Exception {
LOG.info("Starting testIncrement");
- final byte [] TABLENAME = Bytes.toBytes("testIncrement");
+ final TableName TABLENAME = TableName.valueOf("testIncrement");
Table ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
byte [][] ROWS = new byte [][] {
@@ -4648,7 +4647,7 @@ public class TestFromClientSide {
@Test
public void testClientPoolRoundRobin() throws IOException {
- final byte[] tableName = Bytes.toBytes("testClientPoolRoundRobin");
+ final TableName tableName = TableName.valueOf("testClientPoolRoundRobin");
int poolSize = 3;
int numVersions = poolSize * 2;
@@ -4656,8 +4655,7 @@ public class TestFromClientSide {
conf.set(HConstants.HBASE_CLIENT_IPC_POOL_TYPE, "round-robin");
conf.setInt(HConstants.HBASE_CLIENT_IPC_POOL_SIZE, poolSize);
- Table table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY },
- conf, Integer.MAX_VALUE);
+ Table table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY }, conf, Integer.MAX_VALUE);
final long ts = EnvironmentEdgeManager.currentTime();
Get get = new Get(ROW);
@@ -4685,7 +4683,7 @@ public class TestFromClientSide {
@Ignore ("Flakey: HBASE-8989") @Test
public void testClientPoolThreadLocal() throws IOException {
- final byte[] tableName = Bytes.toBytes("testClientPoolThreadLocal");
+ final TableName tableName = TableName.valueOf("testClientPoolThreadLocal");
int poolSize = Integer.MAX_VALUE;
int numVersions = 3;
@@ -4771,8 +4769,7 @@ public class TestFromClientSide {
final byte [] anotherrow = Bytes.toBytes("anotherrow");
final byte [] value2 = Bytes.toBytes("abcd");
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPut"),
- new byte [][] {FAMILY});
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testCheckAndPut"), FAMILY);
Put put1 = new Put(ROW);
put1.add(FAMILY, QUALIFIER, VALUE);
@@ -4813,8 +4810,7 @@ public class TestFromClientSide {
final byte [] value3 = Bytes.toBytes("cccc");
final byte [] value4 = Bytes.toBytes("dddd");
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPutWithCompareOp"),
- new byte [][] {FAMILY});
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testCheckAndPutWithCompareOp"), FAMILY);
Put put2 = new Put(ROW);
put2.add(FAMILY, QUALIFIER, value2);
@@ -4879,8 +4875,8 @@ public class TestFromClientSide {
final byte [] value3 = Bytes.toBytes("cccc");
final byte [] value4 = Bytes.toBytes("dddd");
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndDeleteWithCompareOp"),
- new byte [][] {FAMILY});
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testCheckAndDeleteWithCompareOp"),
+ FAMILY);
Put put2 = new Put(ROW);
put2.add(FAMILY, QUALIFIER, value2);
@@ -4953,7 +4949,7 @@ public class TestFromClientSide {
@Test
@SuppressWarnings ("unused")
public void testScanMetrics() throws Exception {
- byte [] TABLENAME = Bytes.toBytes("testScanMetrics");
+ TableName TABLENAME = TableName.valueOf("testScanMetrics");
Configuration conf = TEST_UTIL.getConfiguration();
TEST_UTIL.createTable(TABLENAME, FAMILY);
@@ -5057,13 +5053,12 @@ public class TestFromClientSide {
*/
@Test
public void testCacheOnWriteEvictOnClose() throws Exception {
- byte [] tableName = Bytes.toBytes("testCOWEOCfromClient");
+ TableName tableName = TableName.valueOf("testCOWEOCfromClient");
byte [] data = Bytes.toBytes("data");
- HTable table = TEST_UTIL.createTable(tableName, new byte [][] {FAMILY});
+ HTable table = TEST_UTIL.createTable(tableName, FAMILY);
// get the block cache and region
String regionName = table.getRegionLocations().firstKey().getEncodedName();
- HRegion region = TEST_UTIL.getRSForFirstRegionInTable(
- tableName).getFromOnlineRegions(regionName);
+ HRegion region = TEST_UTIL.getRSForFirstRegionInTable(tableName).getFromOnlineRegions(regionName);
Store store = region.getStores().values().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -5171,8 +5166,7 @@ public class TestFromClientSide {
*/
public void testNonCachedGetRegionLocation() throws Exception {
// Test Initialization.
- String tableName = "testNonCachedGetRegionLocation";
- byte [] TABLE = Bytes.toBytes(tableName);
+ TableName TABLE = TableName.valueOf("testNonCachedGetRegionLocation");
byte [] family1 = Bytes.toBytes("f1");
byte [] family2 = Bytes.toBytes("f2");
HTable table = TEST_UTIL.createTable(TABLE, new byte[][] {family1, family2}, 10);
@@ -5220,7 +5214,7 @@ public class TestFromClientSide {
// Test Initialization.
byte [] startKey = Bytes.toBytes("ddc");
byte [] endKey = Bytes.toBytes("mmm");
- byte [] TABLE = Bytes.toBytes("testGetRegionsInRange");
+ TableName TABLE = TableName.valueOf("testGetRegionsInRange");
HTable table = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10);
int numOfRegions = TEST_UTIL.createMultiRegions(table, FAMILY);
assertEquals(25, numOfRegions);
@@ -5271,7 +5265,7 @@ public class TestFromClientSide {
@Test
public void testJira6912() throws Exception {
- byte [] TABLE = Bytes.toBytes("testJira6912");
+ TableName TABLE = TableName.valueOf("testJira6912");
Table foo = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10);
List<Put> puts = new ArrayList<Put>();
@@ -5297,7 +5291,7 @@ public class TestFromClientSide {
@Test
public void testScan_NullQualifier() throws IOException {
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testScan_NullQualifier"), FAMILY);
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testScan_NullQualifier"), FAMILY);
Put put = new Put(ROW);
put.add(FAMILY, QUALIFIER, VALUE);
table.put(put);
@@ -5326,7 +5320,7 @@ public class TestFromClientSide {
@Test
public void testNegativeTimestamp() throws IOException {
- Table table = TEST_UTIL.createTable(Bytes.toBytes("testNegativeTimestamp"), FAMILY);
+ Table table = TEST_UTIL.createTable(TableName.valueOf("testNegativeTimestamp"), FAMILY);
try {
Put put = new Put(ROW, -1);
@@ -5470,8 +5464,8 @@ public class TestFromClientSide {
@Test
public void testRawScanRespectsVersions() throws Exception {
- byte[] TABLE = Bytes.toBytes("testRawScan");
- Table table = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY });
+ TableName TABLE = TableName.valueOf("testRawScan");
+ Table table = TEST_UTIL.createTable(TABLE, FAMILY);
byte[] row = Bytes.toBytes("row");
// put the same row 4 times, with different values
@@ -5546,7 +5540,7 @@ public class TestFromClientSide {
@Test
public void testSmallScan() throws Exception {
// Test Initialization.
- byte[] TABLE = Bytes.toBytes("testSmallScan");
+ TableName TABLE = TableName.valueOf("testSmallScan");
Table table = TEST_UTIL.createTable(TABLE, FAMILY);
// Insert one row each region
@@ -5582,7 +5576,7 @@ public class TestFromClientSide {
@Test
public void testSuperSimpleWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testSuperSimpleWithReverseScan");
+ TableName TABLE = TableName.valueOf("testSuperSimpleWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
Put put = new Put(Bytes.toBytes("0-b11111-0000000000000000000"));
put.add(FAMILY, QUALIFIER, VALUE);
@@ -5628,7 +5622,7 @@ public class TestFromClientSide {
@Test
public void testFiltersWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testFiltersWithReverseScan");
+ TableName TABLE = TableName.valueOf("testFiltersWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
byte[][] ROWS = makeN(ROW, 10);
byte[][] QUALIFIERS = { Bytes.toBytes("col0-<d2v1>-<d3v2>"),
@@ -5668,7 +5662,7 @@ public class TestFromClientSide {
@Test
public void testKeyOnlyFilterWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testKeyOnlyFilterWithReverseScan");
+ TableName TABLE = TableName.valueOf("testKeyOnlyFilterWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
byte[][] ROWS = makeN(ROW, 10);
byte[][] QUALIFIERS = { Bytes.toBytes("col0-<d2v1>-<d3v2>"),
@@ -5709,7 +5703,7 @@ public class TestFromClientSide {
*/
@Test
public void testSimpleMissingWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testSimpleMissingWithReverseScan");
+ TableName TABLE = TableName.valueOf("testSimpleMissingWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
byte[][] ROWS = makeN(ROW, 4);
@@ -5774,7 +5768,7 @@ public class TestFromClientSide {
@Test
public void testNullWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testNullWithReverseScan");
+ TableName TABLE = TableName.valueOf("testNullWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
// Null qualifier (should work)
Put put = new Put(ROW);
@@ -5786,7 +5780,7 @@ public class TestFromClientSide {
ht.delete(delete);
// Use a new table
byte[] TABLE2 = Bytes.toBytes("testNull2WithReverseScan");
- ht = TEST_UTIL.createTable(TABLE2, FAMILY);
+ ht = TEST_UTIL.createTable(TableName.valueOf(TABLE2), FAMILY);
// Empty qualifier, byte[0] instead of null (should work)
put = new Put(ROW);
put.add(FAMILY, HConstants.EMPTY_BYTE_ARRAY, VALUE);
@@ -5811,13 +5805,12 @@ public class TestFromClientSide {
@Test
public void testDeletesWithReverseScan() throws Exception {
- byte[] TABLE = Bytes.toBytes("testDeletesWithReverseScan");
+ TableName TABLE = TableName.valueOf("testDeletesWithReverseScan");
byte[][] ROWS = makeNAscii(ROW, 6);
byte[][] FAMILIES = makeNAscii(FAMILY, 3);
byte[][] VALUES = makeN(VALUE, 5);
long[] ts = { 1000, 2000, 3000, 4000, 5000 };
- Table ht = TEST_UTIL.createTable(TABLE, FAMILIES,
- TEST_UTIL.getConfiguration(), 3);
+ Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, TEST_UTIL.getConfiguration(), 3);
Put put = new Put(ROW);
put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
@@ -5998,7 +5991,7 @@ public class TestFromClientSide {
@Test
public void testReversedScanUnderMultiRegions() throws Exception {
// Test Initialization.
- byte[] TABLE = Bytes.toBytes("testReversedScanUnderMultiRegions");
+ TableName TABLE = TableName.valueOf("testReversedScanUnderMultiRegions");
byte[] maxByteArray = ReversedClientScanner.MAX_BYTE_ARRAY;
byte[][] splitRows = new byte[][] { Bytes.toBytes("005"),
Bytes.add(Bytes.toBytes("005"), Bytes.multiple(maxByteArray, 16)),
@@ -6055,7 +6048,7 @@ public class TestFromClientSide {
@Test
public void testSmallReversedScanUnderMultiRegions() throws Exception {
// Test Initialization.
- byte[] TABLE = Bytes.toBytes("testSmallReversedScanUnderMultiRegions");
+ TableName TABLE = TableName.valueOf("testSmallReversedScanUnderMultiRegions");
byte[][] splitRows = new byte[][]{
Bytes.toBytes("000"), Bytes.toBytes("002"), Bytes.toBytes("004"),
Bytes.toBytes("006"), Bytes.toBytes("008"), Bytes.toBytes("010")};
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 7b15544..31bc135 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -338,8 +338,8 @@ public class TestFromClientSide3 {
public void testHTableExistsMethodMultipleRegionsSingleGet() throws Exception {
Table table = TEST_UTIL.createTable(
- Bytes.toBytes("testHTableExistsMethodMultipleRegionsSingleGet"), new byte[][] { FAMILY }, 1,
- new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255);
+ TableName.valueOf("testHTableExistsMethodMultipleRegionsSingleGet"), new byte[][] { FAMILY },
+ 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255);
Put put = new Put(ROW);
put.add(FAMILY, QUALIFIER, VALUE);
@@ -357,8 +357,8 @@ public class TestFromClientSide3 {
@Test
public void testHTableExistsMethodMultipleRegionsMultipleGets() throws Exception {
HTable table = TEST_UTIL.createTable(
- Bytes.toBytes("testHTableExistsMethodMultipleRegionsMultipleGets"), new byte[][] { FAMILY },
- 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255);
+ TableName.valueOf("testHTableExistsMethodMultipleRegionsMultipleGets"),
+ new byte[][] { FAMILY }, 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255);
Put put = new Put(ROW);
put.add(FAMILY, QUALIFIER, VALUE);
table.put (put);
@@ -410,7 +410,7 @@ public class TestFromClientSide3 {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(Bytes.toBytes("test")));
desc.addFamily(new HColumnDescriptor(FAMILY));
admin.createTable(desc);
- Table table = new HTable(TEST_UTIL.getConfiguration(), "test");
+ Table table = new HTable(TEST_UTIL.getConfiguration(), desc.getTableName());
Put put = new Put(ROW_BYTES);
put.add(FAMILY, COL_QUAL, VAL_BYTES);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index 1558eaf..e4bf256 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.exceptions.OperationConflictException;
import org.apache.hadoop.hbase.ipc.RpcClient;
@@ -67,7 +68,7 @@ public class TestMultiParallel {
private static final byte[] VALUE = Bytes.toBytes("value");
private static final byte[] QUALIFIER = Bytes.toBytes("qual");
private static final String FAMILY = "family";
- private static final String TEST_TABLE = "multi_test_table";
+ private static final TableName TEST_TABLE = TableName.valueOf("multi_test_table");
private static final byte[] BYTES_FAMILY = Bytes.toBytes(FAMILY);
private static final byte[] ONE_ROW = Bytes.toBytes("xxx");
private static final byte [][] KEYS = makeKeys();
@@ -79,9 +80,9 @@ public class TestMultiParallel {
((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
UTIL.startMiniCluster(slaves);
- HTable t = UTIL.createTable(Bytes.toBytes(TEST_TABLE), Bytes.toBytes(FAMILY));
+ HTable t = UTIL.createTable(TEST_TABLE, Bytes.toBytes(FAMILY));
UTIL.createMultiRegions(t, Bytes.toBytes(FAMILY));
- UTIL.waitTableEnabled(Bytes.toBytes(TEST_TABLE));
+ UTIL.waitTableEnabled(TEST_TABLE.getName());
t.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
index 61de851..b46312f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -48,7 +49,7 @@ public class TestScannerTimeout {
final Log LOG = LogFactory.getLog(getClass());
private final static byte[] SOME_BYTES = Bytes.toBytes("f");
- private final static byte[] TABLE_NAME = Bytes.toBytes("t");
+ private final static TableName TABLE_NAME = TableName.valueOf("t");
private final static int NB_ROWS = 10;
// Be careful w/ what you set this timer to... it can get in the way of
// the mini cluster coming up -- the verification in particular.
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index 0165bac..a6c1cfe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
import org.apache.hadoop.hbase.master.HMaster;
@@ -101,7 +102,7 @@ public class TestScannersFromClientSide {
*/
@Test
public void testScanBatch() throws Exception {
- byte [] TABLE = Bytes.toBytes("testScanBatch");
+ TableName TABLE = TableName.valueOf("testScanBatch");
byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 8);
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -442,7 +443,7 @@ public class TestScannersFromClientSide {
*/
@Test
public void testScanOnReopenedRegion() throws Exception {
- byte [] TABLE = Bytes.toBytes("testScanOnReopenedRegion");
+ TableName TABLE = TableName.valueOf("testScanOnReopenedRegion");
byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 2);
HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
index a1ead55..1d9ff1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
@@ -58,7 +58,7 @@ public class TestSnapshotCloneIndependence {
private static final String STRING_TABLE_NAME = "test";
private static final String TEST_FAM_STR = "fam";
private static final byte[] TEST_FAM = Bytes.toBytes(TEST_FAM_STR);
- private static final byte[] TABLE_NAME = Bytes.toBytes(STRING_TABLE_NAME);
+ private static final TableName TABLE_NAME = TableName.valueOf(STRING_TABLE_NAME);
/**
* Setup the config for the cluster and start it
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 12a0457..4843715 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -98,7 +98,7 @@ public class TestTimestampsFilter {
Cell kvs[];
// create table; set versions to max...
- Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE);
+ Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE);
for (int rowIdx = 0; rowIdx < 5; rowIdx++) {
for (int colIdx = 0; colIdx < 5; colIdx++) {
@@ -173,7 +173,7 @@ public class TestTimestampsFilter {
byte [][] FAMILIES = new byte[][] { FAMILY };
// create table; set versions to max...
- Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE);
+ Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE);
Put p = new Put(Bytes.toBytes("row"));
p.add(FAMILY, Bytes.toBytes("column0"), 3, Bytes.toBytes("value0-3"));
@@ -233,7 +233,7 @@ public class TestTimestampsFilter {
byte [][] FAMILIES = new byte[][] { FAMILY };
// create table; set versions to max...
- Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE);
+ Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE);
// For row:0, col:0: insert versions 1 through 5.
putNVersions(ht, FAMILY, 0, 0, 1, 5);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
index af50b52..de0057c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
@@ -66,7 +66,7 @@ public class TestHTableWrapper {
private static final HBaseTestingUtility util = new HBaseTestingUtility();
- private static final byte[] TEST_TABLE = Bytes.toBytes("test");
+ private static final TableName TEST_TABLE = TableName.valueOf("test");
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] ROW_A = Bytes.toBytes("aaa");
@@ -136,7 +136,7 @@ public class TestHTableWrapper {
CoprocessorEnvironment env = cpHost.findCoprocessorEnvironment(implClazz.getName());
assertEquals(Coprocessor.VERSION, env.getVersion());
assertEquals(VersionInfo.getVersion(), env.getHBaseVersion());
- hTableInterface = env.getTable(TableName.valueOf(TEST_TABLE));
+ hTableInterface = env.getTable(TEST_TABLE);
checkHTableInterfaceMethods();
cpHost.shutdown(env);
}
@@ -170,7 +170,7 @@ public class TestHTableWrapper {
}
private void checkNameAndDescriptor() throws IOException {
- assertArrayEquals(TEST_TABLE, hTableInterface.getTableName());
+ assertEquals(TEST_TABLE, hTableInterface.getName());
assertEquals(table.getTableDescriptor(), hTableInterface.getTableDescriptor());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
index f6e7437..57db176 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
@@ -162,7 +162,7 @@ public class TestOpenTableInCoprocessor {
admin.createTable(primary);
admin.createTable(other);
- Table table = new HTable(UTIL.getConfiguration(), "primary");
+ Table table = new HTable(UTIL.getConfiguration(), TableName.valueOf("primary"));
Put p = new Put(new byte[] { 'a' });
p.add(family, null, new byte[] { 'a' });
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
index dc96d50..1c81adf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
@@ -160,7 +160,7 @@ public class TestColumnRangeFilter {
public void TestColumnRangeFilterClient() throws Exception {
String family = "Family";
String table = "TestColumnRangeFilterClient";
- Table ht = TEST_UTIL.createTable(Bytes.toBytes(table),
+ Table ht = TEST_UTIL.createTable(TableName.valueOf(table),
Bytes.toBytes(family), Integer.MAX_VALUE);
List<String> rows = generateRandomWords(10, 8);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
index 8395817..565c7db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
@@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -89,7 +90,7 @@ public class TestFuzzyRowAndColumnRangeFilter {
public void Test() throws Exception {
String cf = "f";
String table = "TestFuzzyAndColumnRangeFilterClient";
- Table ht = TEST_UTIL.createTable(Bytes.toBytes(table),
+ Table ht = TEST_UTIL.createTable(TableName.valueOf(table),
Bytes.toBytes(cf), Integer.MAX_VALUE);
// 10 byte row key - (2 bytes 4 bytes 4 bytes)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index f21176b..409ac8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -260,7 +261,7 @@ public class TestBlockReorder {
// We use the regionserver file system & conf as we expect it to have the hook.
conf = targetRs.getConfiguration();
HFileSystem rfs = (HFileSystem) targetRs.getFileSystem();
- Table h = htu.createTable("table".getBytes(), sb);
+ Table h = htu.createTable(TableName.valueOf("table"), sb);
// Now, we have 4 datanodes and a replication count of 3. So we don't know if the datanode
// with the same node will be used. We can't really stop an existing datanode, this would