Posted to commits@hbase.apache.org by en...@apache.org on 2014/09/30 03:40:01 UTC
[2/3] HBASE-12042 Replace internal uses of HTable(Configuration, String) with HTable(Configuration, TableName) (Solomon Duskis)
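
For readers skimming the patch: every hunk below applies the same mechanical change, resolving a String (or byte[]) table name into a TableName object up front and passing that object through helper signatures instead of converting at each call site. A minimal before/after sketch of the pattern follows; it is illustrative only and not taken verbatim from the patch (the table name "example_table" and the class name are made up here):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Table;

public class TableNameMigrationSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();

    // Before this patch: the String-based constructor it retires.
    // Table table = new HTable(conf, "example_table");

    // After: parse the name once into a TableName and pass the object around.
    TableName tableName = TableName.valueOf("example_table");
    Table table = new HTable(conf, tableName);
    try {
      // ... reads and writes against the table ...
    } finally {
      table.close(); // release client resources when done
    }
  }
}

TableName.valueOf() validates the name and carries the namespace along with it, which is why the test helpers below change their parameter types from String to TableName rather than re-deriving the name in each method.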
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index c961e4e..0be8931 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -74,7 +74,7 @@ public class TestChangingEncoding {
private HBaseAdmin admin;
private HColumnDescriptor hcd;
- private String tableName;
+ private TableName tableName;
private static final List<DataBlockEncoding> ENCODINGS_TO_ITERATE =
createEncodingsToIterate();
@@ -89,8 +89,8 @@ public class TestChangingEncoding {
private int numBatchesWritten;
private void prepareTest(String testId) throws IOException {
- tableName = "test_table_" + testId;
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
+ tableName = TableName.valueOf("test_table_" + testId);
+ HTableDescriptor htd = new HTableDescriptor(tableName);
hcd = new HColumnDescriptor(CF);
htd.addFamily(hcd);
admin.createTable(htd);
@@ -135,7 +135,7 @@ public class TestChangingEncoding {
+ "_col" + j);
}
- static void writeTestDataBatch(Configuration conf, String tableName,
+ static void writeTestDataBatch(Configuration conf, TableName tableName,
int batchId) throws Exception {
LOG.debug("Writing test data batch " + batchId);
Table table = new HTable(conf, tableName);
@@ -153,7 +153,7 @@ public class TestChangingEncoding {
table.close();
}
- static void verifyTestDataBatch(Configuration conf, String tableName,
+ static void verifyTestDataBatch(Configuration conf, TableName tableName,
int batchId) throws Exception {
LOG.debug("Verifying test data batch " + batchId);
Table table = new HTable(conf, tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index 6a6da1f..9fb4eb8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -91,7 +91,7 @@ public class TestTableInputFormat {
* @throws IOException
*/
public static Table createTable(byte[] tableName) throws IOException {
- Table table = UTIL.createTable(tableName, FAMILY);
+ Table table = UTIL.createTable(TableName.valueOf(tableName), new byte[][]{FAMILY});
Put p = new Put("aaa".getBytes());
p.add(FAMILY, null, "value aaa".getBytes());
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index c827751..107837e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -80,19 +80,19 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
jobConf = new JobConf(UTIL.getConfiguration(), TestTableMapReduce.class);
jobConf.setJobName("process column contents");
jobConf.setNumReduceTasks(1);
- TableMapReduceUtil.initTableMapJob(Bytes.toString(table.getTableName()),
+ TableMapReduceUtil.initTableMapJob(table.getName().getNameAsString(),
Bytes.toString(INPUT_FAMILY), ProcessContentsMapper.class,
ImmutableBytesWritable.class, Put.class, jobConf);
- TableMapReduceUtil.initTableReduceJob(Bytes.toString(table.getTableName()),
+ TableMapReduceUtil.initTableReduceJob(table.getName().getNameAsString(),
IdentityTableReduce.class, jobConf);
- LOG.info("Started " + Bytes.toString(table.getTableName()));
+ LOG.info("Started " + table.getName());
RunningJob job = JobClient.runJob(jobConf);
assertTrue(job.isSuccessful());
LOG.info("After map/reduce completion");
// verify map-reduce results
- verify(Bytes.toString(table.getTableName()));
+ verify(table.getName());
} finally {
if (jobConf != null) {
FileUtil.fullyDelete(new File(jobConf.get("hadoop.tmp.dir")));
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index 451fce5..6e9f8d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -33,6 +33,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.client.Put;
@@ -88,7 +89,7 @@ public class TestTableMapReduceUtil {
@BeforeClass
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
- presidentsTable = createAndFillTable(Bytes.toBytes(TABLE_NAME));
+ presidentsTable = createAndFillTable(TableName.valueOf(TABLE_NAME));
UTIL.startMiniMapReduceCluster();
}
@@ -105,7 +106,7 @@ public class TestTableMapReduceUtil {
LOG.info("before done");
}
- public static Table createAndFillTable(byte[] tableName) throws IOException {
+ public static Table createAndFillTable(TableName tableName) throws IOException {
Table table = UTIL.createTable(tableName, COLUMN_FAMILY);
createPutCommand(table);
return table;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 73a0d1e..fac90a8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -30,6 +30,7 @@ import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -80,8 +81,8 @@ public class TestCopyTable {
*/
@Test
public void testCopyTable() throws Exception {
- final byte[] TABLENAME1 = Bytes.toBytes("testCopyTable1");
- final byte[] TABLENAME2 = Bytes.toBytes("testCopyTable2");
+ final TableName TABLENAME1 = TableName.valueOf("testCopyTable1");
+ final TableName TABLENAME2 = TableName.valueOf("testCopyTable2");
final byte[] FAMILY = Bytes.toBytes("family");
final byte[] COLUMN1 = Bytes.toBytes("c1");
@@ -99,8 +100,8 @@ public class TestCopyTable {
assertEquals(
0,
- copy.run(new String[] { "--new.name=" + Bytes.toString(TABLENAME2),
- Bytes.toString(TABLENAME1) }));
+ copy.run(new String[] { "--new.name=" + TABLENAME2.getNameAsString(),
+ TABLENAME1.getNameAsString() }));
// verify the data was copied into table 2
for (int i = 0; i < 10; i++) {
@@ -118,8 +119,8 @@ public class TestCopyTable {
@Test
public void testStartStopRow() throws Exception {
- final byte[] TABLENAME1 = Bytes.toBytes("testStartStopRow1");
- final byte[] TABLENAME2 = Bytes.toBytes("testStartStopRow2");
+ final TableName TABLENAME1 = TableName.valueOf("testStartStopRow1");
+ final TableName TABLENAME2 = TableName.valueOf("testStartStopRow2");
final byte[] FAMILY = Bytes.toBytes("family");
final byte[] COLUMN1 = Bytes.toBytes("c1");
final byte[] ROW0 = Bytes.toBytes("row0");
@@ -143,8 +144,8 @@ public class TestCopyTable {
CopyTable copy = new CopyTable(TEST_UTIL.getConfiguration());
assertEquals(
0,
- copy.run(new String[] { "--new.name=" + Bytes.toString(TABLENAME2), "--startrow=row1",
- "--stoprow=row2", Bytes.toString(TABLENAME1) }));
+ copy.run(new String[] { "--new.name=" + TABLENAME2, "--startrow=row1",
+ "--stoprow=row2", TABLENAME1.getNameAsString() }));
// verify the data was copied into table 2
// row1 exist, row0, row2 do not exist
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
index bf4d8a0..33d0e74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
@@ -17,6 +17,7 @@ package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -52,8 +53,8 @@ public class TestHRegionPartitioner {
byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") };
- UTIL.createTable(Bytes.toBytes("out_table"), families, 1, Bytes.toBytes("aa"),
- Bytes.toBytes("cc"), 3);
+ UTIL.createTable(TableName.valueOf("out_table"), families, 1,
+ Bytes.toBytes("aa"), Bytes.toBytes("cc"), 3);
HRegionPartitioner<Long, Long> partitioner = new HRegionPartitioner<Long, Long>();
Configuration configuration = UTIL.getConfiguration();
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 91ff718..04b1a92 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -161,7 +161,7 @@ public class TestImportExport {
@Test
public void testSimpleCase() throws Exception {
String EXPORT_TABLE = "exportSimpleCase";
- Table t = UTIL.createTable(Bytes.toBytes(EXPORT_TABLE), FAMILYA, 3);
+ Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);
Put p = new Put(ROW1);
p.add(FAMILYA, QUAL, now, QUAL);
p.add(FAMILYA, QUAL, now+1, QUAL);
@@ -181,7 +181,7 @@ public class TestImportExport {
assertTrue(runExport(args));
String IMPORT_TABLE = "importTableSimpleCase";
- t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), FAMILYB, 3);
+ t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);
args = new String[] {
"-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
IMPORT_TABLE,
@@ -224,7 +224,7 @@ public class TestImportExport {
fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR
+ "exportedTableIn94Format"));
String IMPORT_TABLE = "importTableExportedFrom94";
- Table t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), Bytes.toBytes("f1"), 3);
+ Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);
String[] args = new String[] {
"-Dhbase.import.version=0.94" ,
IMPORT_TABLE, FQ_OUTPUT_DIR
@@ -254,7 +254,7 @@ public class TestImportExport {
.setMaxVersions(1)
);
UTIL.getHBaseAdmin().createTable(desc);
- Table t = new HTable(UTIL.getConfiguration(), BATCH_TABLE);
+ Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());
Put p = new Put(ROW1);
p.add(FAMILYA, QUAL, now, QUAL);
@@ -285,7 +285,7 @@ public class TestImportExport {
.setKeepDeletedCells(true)
);
UTIL.getHBaseAdmin().createTable(desc);
- Table t = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
+ Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());
Put p = new Put(ROW1);
p.add(FAMILYA, QUAL, now, QUAL);
@@ -317,7 +317,7 @@ public class TestImportExport {
);
UTIL.getHBaseAdmin().createTable(desc);
t.close();
- t = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
+ t = new HTable(UTIL.getConfiguration(), desc.getTableName());
args = new String[] {
IMPORT_TABLE,
FQ_OUTPUT_DIR
@@ -351,7 +351,7 @@ public class TestImportExport {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
UTIL.getHBaseAdmin().createTable(desc);
- Table exportTable = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
+ Table exportTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
Put p = new Put(ROW1);
p.add(FAMILYA, QUAL, now, QUAL);
@@ -378,7 +378,7 @@ public class TestImportExport {
desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
UTIL.getHBaseAdmin().createTable(desc);
- Table importTable = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
+ Table importTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
"-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, FQ_OUTPUT_DIR,
"1000" };
@@ -544,7 +544,7 @@ public class TestImportExport {
public void testDurability() throws IOException, InterruptedException, ClassNotFoundException {
// Create an export table.
String exportTableName = "exporttestDurability";
- Table exportTable = UTIL.createTable(Bytes.toBytes(exportTableName), FAMILYA, 3);
+ Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);
// Insert some data
Put put = new Put(ROW1);
@@ -565,7 +565,7 @@ public class TestImportExport {
// Create the table for import
String importTableName = "importTestDurability1";
- Table importTable = UTIL.createTable(Bytes.toBytes(importTableName), FAMILYA, 3);
+ Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
// Register the hlog listener for the import table
TableWALActionListener walListener = new TableWALActionListener(importTableName);
@@ -584,7 +584,7 @@ public class TestImportExport {
// Run the import with the default durability option
importTableName = "importTestDurability2";
- importTable = UTIL.createTable(Bytes.toBytes(importTableName), FAMILYA, 3);
+ importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
hLog.unregisterWALActionsListener(walListener);
walListener = new TableWALActionListener(importTableName);
hLog.registerWALActionsListener(walListener);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index f31a916..eddee5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
@@ -120,7 +121,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest=>myvalue\n";
- util.createTable(tableName, FAMILY);
+ util.createTable(TableName.valueOf(tableName), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1, true);
util.deleteTable(tableName);
}
@@ -136,7 +137,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest1=>myvalue\n";
- util.createTable(tableName, FAMILY);
+ util.createTable(TableName.valueOf(tableName), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1, false);
util.deleteTable(tableName);
}
@@ -177,7 +178,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
LOG.debug("Running ImportTsv with arguments: " + argv);
assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(args)));
- validateTable(conf, table, family, valueMultiplier, dataAvailable);
+ validateTable(conf, TableName.valueOf(table), family, valueMultiplier, dataAvailable);
if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
LOG.debug("Deleting test subdirectory");
@@ -191,7 +192,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
*
* @param dataAvailable
*/
- private static void validateTable(Configuration conf, String tableName, String family,
+ private static void validateTable(Configuration conf, TableName tableName, String family,
int valueMultiplier, boolean dataAvailable) throws IOException {
LOG.debug("Validating table.");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index b15d04e..0ca0f8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
@@ -161,20 +162,20 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
- util.createTable(tableName, FAMILY);
+ util.createTable(TableName.valueOf(tableName), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1);
util.deleteTable(tableName);
}
@Test
public void testMROnTableWithDeletes() throws Exception {
- String tableName = "test-" + UUID.randomUUID();
+ TableName tableName = TableName.valueOf("test-" + UUID.randomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
"-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
- "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
+ "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
util.createTable(tableName, FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1);
@@ -182,7 +183,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.deleteTable(tableName);
}
- private void issueDeleteAndVerifyData(String tableName) throws IOException {
+ private void issueDeleteAndVerifyData(TableName tableName) throws IOException {
LOG.debug("Validating table after delete.");
Table table = new HTable(conf, tableName);
boolean verified = false;
@@ -229,7 +230,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
+ "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
- util.createTable(tableName, FAMILY);
+ util.createTable(TableName.valueOf(tableName), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1);
util.deleteTable(tableName);
}
@@ -266,7 +267,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
- util.createTable(tableName, FAMILY);
+ util.createTable(TableName.valueOf(tableName), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1);
util.deleteTable(tableName);
}
@@ -283,12 +284,13 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
*/
protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
String[] args, int valueMultiplier) throws Exception {
- String table = args[args.length - 1];
+ TableName table = TableName.valueOf(args[args.length - 1]);
Configuration conf = new Configuration(util.getConfiguration());
// populate input file
FileSystem fs = FileSystem.get(conf);
- Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
+ Path inputPath = fs.makeQualified(new Path(util
+ .getDataTestDirOnTestFS(table.getNameAsString()), "input.dat"));
FSDataOutputStream op = fs.create(inputPath, true);
if (data == null) {
data = "KEY\u001bVALUE1\u001bVALUE2\n";
@@ -330,7 +332,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
LOG.debug("Deleting test subdirectory");
- util.cleanupDataTestDirOnTestFS(table);
+ util.cleanupDataTestDirOnTestFS(table.getNameAsString());
}
return tool;
}
@@ -364,7 +366,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
/**
* Confirm ImportTsv via data in online table.
*/
- private static void validateTable(Configuration conf, String tableName, String family,
+ private static void validateTable(Configuration conf, TableName tableName, String family,
int valueMultiplier) throws IOException {
LOG.debug("Validating table.");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 8706d9c..e3b3495 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -111,7 +112,7 @@ public class TestImportTsv implements Configurable {
table
};
- util.createTable(table, FAMILY);
+ util.createTable(TableName.valueOf(table), FAMILY);
doMROnTableTest(util, FAMILY, null, args, 1);
util.deleteTable(table);
}
@@ -129,7 +130,7 @@ public class TestImportTsv implements Configurable {
};
String data = "KEY,1234,VALUE1,VALUE2\n";
- util.createTable(table, FAMILY);
+ util.createTable(TableName.valueOf(table), FAMILY);
doMROnTableTest(util, FAMILY, data, args, 1);
util.deleteTable(table);
}
@@ -146,7 +147,7 @@ public class TestImportTsv implements Configurable {
table
};
- util.createTable(table, FAMILY);
+ util.createTable(TableName.valueOf(table), FAMILY);
doMROnTableTest(util, FAMILY, null, args, 3);
util.deleteTable(table);
}
@@ -181,7 +182,7 @@ public class TestImportTsv implements Configurable {
table
};
- util.createTable(table, FAMILY);
+ util.createTable(TableName.valueOf(table), FAMILY);
doMROnTableTest(util, FAMILY, null, args, 3);
util.deleteTable(table);
}
@@ -288,7 +289,7 @@ public class TestImportTsv implements Configurable {
if (createdHFiles)
validateHFiles(fs, outputPath, family);
else
- validateTable(conf, table, family, valueMultiplier);
+ validateTable(conf, TableName.valueOf(table), family, valueMultiplier);
if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
LOG.debug("Deleting test subdirectory");
@@ -300,7 +301,7 @@ public class TestImportTsv implements Configurable {
/**
* Confirm ImportTsv via data in online table.
*/
- private static void validateTable(Configuration conf, String tableName,
+ private static void validateTable(Configuration conf, TableName tableName,
String family, int valueMultiplier) throws IOException {
LOG.debug("Validating table.");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
index 338202c..f44909a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
@@ -121,10 +121,10 @@ public class TestLoadIncrementalHFilesSplitRecovery {
* Creates a table with given table name and specified number of column
* families if the table does not already exist.
*/
- private void setupTable(String table, int cfs) throws IOException {
+ private void setupTable(TableName table, int cfs) throws IOException {
try {
LOG.info("Creating table " + table);
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
+ HTableDescriptor htd = new HTableDescriptor(table);
for (int i = 0; i < cfs; i++) {
htd.addFamily(new HColumnDescriptor(family(i)));
}
@@ -142,11 +142,11 @@ public class TestLoadIncrementalHFilesSplitRecovery {
* @param cfs
* @param SPLIT_KEYS
*/
- private void setupTableWithSplitkeys(String table, int cfs, byte[][] SPLIT_KEYS)
+ private void setupTableWithSplitkeys(TableName table, int cfs, byte[][] SPLIT_KEYS)
throws IOException {
try {
LOG.info("Creating table " + table);
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
+ HTableDescriptor htd = new HTableDescriptor(table);
for (int i = 0; i < cfs; i++) {
htd.addFamily(new HColumnDescriptor(family(i)));
}
@@ -157,9 +157,9 @@ public class TestLoadIncrementalHFilesSplitRecovery {
}
}
- private Path buildBulkFiles(String table, int value) throws Exception {
- Path dir = util.getDataTestDirOnTestFS(table);
- Path bulk1 = new Path(dir, table+value);
+ private Path buildBulkFiles(TableName table, int value) throws Exception {
+ Path dir = util.getDataTestDirOnTestFS(table.getNameAsString());
+ Path bulk1 = new Path(dir, table.getNameAsString() + value);
FileSystem fs = util.getTestFileSystem();
buildHFiles(fs, bulk1, value);
return bulk1;
@@ -168,26 +168,25 @@ public class TestLoadIncrementalHFilesSplitRecovery {
/**
* Populate table with known values.
*/
- private void populateTable(String table, int value) throws Exception {
+ private void populateTable(TableName table, int value) throws Exception {
// create HFiles for different column families
LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration());
Path bulk1 = buildBulkFiles(table, value);
- HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
+ HTable t = new HTable(util.getConfiguration(), table);
lih.doBulkLoad(bulk1, t);
}
/**
* Split the known table in half. (this is hard coded for this test suite)
*/
- private void forceSplit(String table) {
+ private void forceSplit(TableName table) {
try {
// need to call regions server to by synchronous but isn't visible.
- HRegionServer hrs = util.getRSForFirstRegionInTable(Bytes
- .toBytes(table));
+ HRegionServer hrs = util.getRSForFirstRegionInTable(table);
for (HRegionInfo hri :
ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) {
- if (Bytes.equals(hri.getTable().getName(), Bytes.toBytes(table))) {
+ if (hri.getTable().equals(table)) {
// splitRegion doesn't work if startkey/endkey are null
ProtobufUtil.split(hrs.getRSRpcServices(), hri, rowkey(ROWCOUNT / 2)); // hard code split
}
@@ -199,7 +198,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
regions = 0;
for (HRegionInfo hri :
ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) {
- if (Bytes.equals(hri.getTable().getName(), Bytes.toBytes(table))) {
+ if (hri.getTable().equals(table)) {
regions++;
}
}
@@ -232,10 +231,11 @@ public class TestLoadIncrementalHFilesSplitRecovery {
* expected number of rows.
* @throws IOException
*/
- void assertExpectedTable(String table, int count, int value) throws IOException {
+ void assertExpectedTable(TableName table, int count, int value) throws IOException {
Table t = null;
try {
- assertEquals(util.getHBaseAdmin().listTables(table).length, 1);
+ assertEquals(
+ util.getHBaseAdmin().listTables(table.getNameAsString()).length, 1);
t = new HTable(util.getConfiguration(), table);
Scan s = new Scan();
ResultScanner sr = t.getScanner(s);
@@ -262,7 +262,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
*/
@Test(expected=IOException.class)
public void testBulkLoadPhaseFailure() throws Exception {
- String table = "bulkLoadPhaseFailure";
+ TableName table = TableName.valueOf("bulkLoadPhaseFailure");
setupTable(table, 10);
final AtomicInteger attmptedCalls = new AtomicInteger();
@@ -293,7 +293,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
// create HFiles for different column families
Path dir = buildBulkFiles(table, 1);
- HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
+ HTable t = new HTable(util.getConfiguration(), table);
lih.doBulkLoad(dir, t);
} finally {
util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
@@ -334,7 +334,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
*/
@Test
public void testSplitWhileBulkLoadPhase() throws Exception {
- final String table = "splitWhileBulkloadPhase";
+ final TableName table = TableName.valueOf("splitWhileBulkloadPhase");
setupTable(table, 10);
populateTable(table,1);
assertExpectedTable(table, ROWCOUNT, 1);
@@ -359,7 +359,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
};
// create HFiles for different column families
- HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
+ HTable t = new HTable(util.getConfiguration(), table);
Path bulk = buildBulkFiles(table, 2);
lih2.doBulkLoad(bulk, t);
@@ -376,7 +376,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
*/
@Test
public void testGroupOrSplitPresplit() throws Exception {
- final String table = "groupOrSplitPresplit";
+ final TableName table = TableName.valueOf("groupOrSplitPresplit");
setupTable(table, 10);
populateTable(table, 1);
assertExpectedTable(table, ROWCOUNT, 1);
@@ -399,7 +399,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
// create HFiles for different column families
Path bulk = buildBulkFiles(table, 2);
- HTable ht = new HTable(util.getConfiguration(), Bytes.toBytes(table));
+ HTable ht = new HTable(util.getConfiguration(), table);
lih.doBulkLoad(bulk, ht);
assertExpectedTable(table, ROWCOUNT, 2);
@@ -412,7 +412,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
*/
@Test(expected = IOException.class)
public void testGroupOrSplitFailure() throws Exception {
- String table = "groupOrSplitFailure";
+ TableName table = TableName.valueOf("groupOrSplitFailure");
setupTable(table, 10);
LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
@@ -434,7 +434,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
// create HFiles for different column families
Path dir = buildBulkFiles(table,1);
- HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
+ HTable t = new HTable(util.getConfiguration(), table);
lih.doBulkLoad(dir, t);
fail("doBulkLoad should have thrown an exception");
@@ -442,9 +442,9 @@ public class TestLoadIncrementalHFilesSplitRecovery {
@Test
public void testGroupOrSplitWhenRegionHoleExistsInMeta() throws Exception {
- String tableName = "testGroupOrSplitWhenRegionHoleExistsInMeta";
+ TableName tableName = TableName.valueOf("testGroupOrSplitWhenRegionHoleExistsInMeta");
byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000100") };
- HTable table = new HTable(util.getConfiguration(), Bytes.toBytes(tableName));
+ HTable table = new HTable(util.getConfiguration(), tableName);
setupTableWithSplitkeys(tableName, 10, SPLIT_KEYS);
Path dir = buildBulkFiles(tableName, 2);
@@ -479,7 +479,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
// Mess it up by leaving a hole in the hbase:meta
HConnection hConnection = HConnectionManager.getConnection(util.getConfiguration());
List<HRegionInfo> regionInfos = MetaTableAccessor.getTableRegions(
- hConnection, TableName.valueOf(tableName));
+ hConnection, tableName);
for (HRegionInfo regionInfo : regionInfos) {
if (Bytes.equals(regionInfo.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
MetaTableAccessor.deleteRegion(hConnection, regionInfo);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
index a78b859..ae1ac8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -76,8 +77,7 @@ public class TestMultiTableInputFormat {
// create and fill table
for (int i = 0; i < 3; i++) {
HTable table =
- TEST_UTIL.createTable(Bytes.toBytes(TABLE_NAME + String.valueOf(i)),
- INPUT_FAMILY);
+ TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME + String.valueOf(i)), INPUT_FAMILY);
TEST_UTIL.createMultiRegions(TEST_UTIL.getConfiguration(), table, INPUT_FAMILY, 4);
TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index e63e746..e42d135 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -59,7 +59,7 @@ public class TestMultithreadedTableMapper {
private static final Log LOG = LogFactory.getLog(TestMultithreadedTableMapper.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
- static final byte[] MULTI_REGION_TABLE_NAME = Bytes.toBytes("mrtest");
+ static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest");
static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
static final byte[] OUTPUT_FAMILY = Bytes.toBytes("text");
static final int NUMBER_OF_THREADS = 10;
@@ -139,7 +139,7 @@ public class TestMultithreadedTableMapper {
Scan scan = new Scan();
scan.addFamily(INPUT_FAMILY);
TableMapReduceUtil.initTableMapperJob(
- Bytes.toString(table.getTableName()), scan,
+ table.getTableName(), scan,
MultithreadedTableMapper.class, ImmutableBytesWritable.class,
Put.class, job);
MultithreadedTableMapper.setMapperClass(job, ProcessContentsMapper.class);
@@ -148,11 +148,11 @@ public class TestMultithreadedTableMapper {
Bytes.toString(table.getTableName()),
IdentityTableReducer.class, job);
FileOutputFormat.setOutputPath(job, new Path("test"));
- LOG.info("Started " + Bytes.toString(table.getTableName()));
+ LOG.info("Started " + table.getTableName());
assertTrue(job.waitForCompletion(true));
LOG.info("After map/reduce completion");
// verify map-reduce results
- verify(Bytes.toString(table.getTableName()));
+ verify(table.getName());
} finally {
table.close();
if (job != null) {
@@ -162,7 +162,7 @@ public class TestMultithreadedTableMapper {
}
}
- private void verify(String tableName) throws IOException {
+ private void verify(TableName tableName) throws IOException {
Table table = new HTable(new Configuration(UTIL.getConfiguration()), tableName);
boolean verified = false;
long pause = UTIL.getConfiguration().getLong("hbase.client.pause", 5 * 1000);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 592992f..99fdfd4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -31,6 +31,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.client.Put;
@@ -68,8 +69,7 @@ public class TestRowCounter {
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster();
TEST_UTIL.startMiniMapReduceCluster();
- Table table = TEST_UTIL.createTable(Bytes.toBytes(TABLE_NAME),
- Bytes.toBytes(COL_FAM));
+ Table table = TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME), Bytes.toBytes(COL_FAM));
writeRows(table);
table.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
index 697289e..750ea39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -77,7 +78,7 @@ public abstract class TestTableInputFormatScanBase {
// start mini hbase cluster
TEST_UTIL.startMiniCluster(3);
// create and fill table
- table = TEST_UTIL.createTable(TABLE_NAME, INPUT_FAMILY);
+ table = TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME), INPUT_FAMILY);
TEST_UTIL.createMultiRegions(table, INPUT_FAMILY);
TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
// start MR cluster
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index fa345f2..11a35f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -111,7 +111,7 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
LOG.info("After map/reduce completion");
// verify map-reduce results
- verify(Bytes.toString(table.getTableName()));
+ verify(table.getName());
} catch (InterruptedException e) {
throw new IOException(e);
} catch (ClassNotFoundException e) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 789c874..c218bc9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -132,7 +133,7 @@ public abstract class TestTableMapReduceBase {
return outval;
}
- protected void verify(String tableName) throws IOException {
+ protected void verify(TableName tableName) throws IOException {
Table table = new HTable(UTIL.getConfiguration(), tableName);
boolean verified = false;
long pause = UTIL.getConfiguration().getLong("hbase.client.pause", 5 * 1000);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index f227692..bde3bc6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -87,8 +87,8 @@ public class TestWALPlayer {
*/
@Test
public void testWALPlayer() throws Exception {
- final byte[] TABLENAME1 = Bytes.toBytes("testWALPlayer1");
- final byte[] TABLENAME2 = Bytes.toBytes("testWALPlayer2");
+ final TableName TABLENAME1 = TableName.valueOf("testWALPlayer1");
+ final TableName TABLENAME2 = TableName.valueOf("testWALPlayer2");
final byte[] FAMILY = Bytes.toBytes("family");
final byte[] COLUMN1 = Bytes.toBytes("c1");
final byte[] COLUMN2 = Bytes.toBytes("c2");
@@ -118,8 +118,8 @@ public class TestWALPlayer {
configuration.set(optionName, "1000");
player.setupTime(configuration, optionName);
assertEquals(1000,configuration.getLong(optionName,0));
- assertEquals(0, player.run(new String[] { walInputDir, Bytes.toString(TABLENAME1),
- Bytes.toString(TABLENAME2) }));
+ assertEquals(0, player.run(new String[] {walInputDir, TABLENAME1.getNameAsString(),
+ TABLENAME2.getNameAsString() }));
// verify the WAL was player into table 2
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index 0af95b9..7ab47ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -218,7 +218,7 @@ public class TestMasterFailover {
assertTrue(master.isInitialized());
// Create a table with a region online
- RegionLocator onlineTable = TEST_UTIL.createTable("onlineTable", "family");
+ RegionLocator onlineTable = TEST_UTIL.createTable(TableName.valueOf("onlineTable"), "family");
// Create a table in META, so it has a region offline
HTableDescriptor offlineTable = new HTableDescriptor(
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
index 5f9f0b8..374366e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
public class TestMasterTransitions {
private static final Log LOG = LogFactory.getLog(TestMasterTransitions.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final String TABLENAME = "master_transitions";
+ private static final TableName TABLENAME = TableName.valueOf("master_transitions");
private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"),
Bytes.toBytes("b"), Bytes.toBytes("c")};
@@ -64,11 +64,10 @@ public class TestMasterTransitions {
TEST_UTIL.getConfiguration().setBoolean("dfs.support.append", true);
TEST_UTIL.startMiniCluster(2);
// Create a table of three families. This will assign a region.
- TableName tableName = TableName.valueOf(TABLENAME);
- TEST_UTIL.createTable(tableName, FAMILIES);
+ TEST_UTIL.createTable(TABLENAME, FAMILIES);
HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
int countOfRegions = TEST_UTIL.createMultiRegions(t, getTestFamily());
- TEST_UTIL.waitUntilAllRegionsAssigned(tableName);
+ TEST_UTIL.waitUntilAllRegionsAssigned(TABLENAME);
addToEachStartKey(countOfRegions);
t.close();
}
@@ -491,12 +490,12 @@ public class TestMasterTransitions {
for (Result r = null; (r = s.next()) != null;) {
HRegionInfo hri = HRegionInfo.getHRegionInfo(r);
if (hri == null) break;
- if (!hri.getTable().getNameAsString().equals(TABLENAME)) {
+ if (!hri.getTable().equals(TABLENAME)) {
continue;
}
// If start key, add 'aaa'.
- if(!hri.getTable().getNameAsString().equals(TABLENAME)) {
+ if(!hri.getTable().equals(TABLENAME)) {
continue;
}
byte [] row = getStartKey(hri);
@@ -529,4 +528,4 @@ public class TestMasterTransitions {
private static byte [] getTestQualifier() {
return getTestFamily();
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
index 1f672d3..0b1ccb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
@@ -50,10 +50,10 @@ public class TestRestartCluster {
private static final Log LOG = LogFactory.getLog(TestRestartCluster.class);
private HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final byte [][] TABLES = {
- Bytes.toBytes("restartTableOne"),
- Bytes.toBytes("restartTableTwo"),
- Bytes.toBytes("restartTableThree")
+ private static final TableName[] TABLES = {
+ TableName.valueOf("restartTableOne"),
+ TableName.valueOf("restartTableTwo"),
+ TableName.valueOf("restartTableThree")
};
private static final byte [] FAMILY = Bytes.toBytes("family");
@@ -68,11 +68,11 @@ public class TestRestartCluster {
Threads.sleep(1);
}
LOG.info("\n\nCreating tables");
- for(byte [] TABLE : TABLES) {
+ for(TableName TABLE : TABLES) {
UTIL.createTable(TABLE, FAMILY);
}
- for(byte [] TABLE : TABLES) {
- UTIL.waitTableEnabled(TABLE);
+ for(TableName TABLE : TABLES) {
+ UTIL.waitTableEnabled(TABLE.getName());
}
List<HRegionInfo> allRegions =
@@ -94,14 +94,14 @@ public class TestRestartCluster {
allRegions = MetaScanner.listAllRegions(new Configuration(UTIL.getConfiguration()), true);
assertEquals(4, allRegions.size());
LOG.info("\n\nWaiting for tables to be available");
- for(byte [] TABLE: TABLES) {
+ for(TableName TABLE: TABLES) {
try {
UTIL.createTable(TABLE, FAMILY);
assertTrue("Able to create table that should already exist", false);
} catch(TableExistsException tee) {
LOG.info("Table already exists as expected");
}
- UTIL.waitTableAvailable(TABLE);
+ UTIL.waitTableAvailable(TABLE.getName());
}
}
@@ -118,11 +118,11 @@ public class TestRestartCluster {
UTIL.getMiniHBaseCluster().getMaster().
getMasterRpcServices().synchronousBalanceSwitch(false);
LOG.info("\n\nCreating tables");
- for(byte [] TABLE : TABLES) {
+ for(TableName TABLE : TABLES) {
UTIL.createTable(TABLE, FAMILY);
}
- for(byte [] TABLE : TABLES) {
- UTIL.waitTableEnabled(TABLE);
+ for(TableName TABLE : TABLES) {
+ UTIL.waitTableEnabled(TABLE.getName());
}
HMaster master = UTIL.getMiniHBaseCluster().getMaster();
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index 0f3db18..4311b29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -81,7 +81,7 @@ public class TestCompactionState {
@Test
public void testInvalidColumnFamily() throws IOException, InterruptedException {
- byte [] table = Bytes.toBytes("testInvalidColumnFamily");
+ TableName table = TableName.valueOf("testInvalidColumnFamily");
byte [] family = Bytes.toBytes("family");
byte [] fakecf = Bytes.toBytes("fakecf");
boolean caughtMinorCompact = false;
@@ -124,8 +124,7 @@ public class TestCompactionState {
final CompactionState expectedState, boolean singleFamily)
throws IOException, InterruptedException {
// Create a table with regions
- TableName table =
- TableName.valueOf(tableName);
+ TableName table = TableName.valueOf(tableName);
byte [] family = Bytes.toBytes("family");
byte [][] families =
{family, Bytes.add(family, Bytes.toBytes("2")), Bytes.add(family, Bytes.toBytes("3"))};
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index d0b7ea3..efae472 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -52,7 +52,7 @@ public class TestEncryptionRandomKeying {
private static Configuration conf = TEST_UTIL.getConfiguration();
private static HTableDescriptor htd;
- private static List<Path> findStorefilePaths(byte[] tableName) throws Exception {
+ private static List<Path> findStorefilePaths(TableName tableName) throws Exception {
List<Path> paths = new ArrayList<Path>();
for (HRegion region:
TEST_UTIL.getRSForFirstRegionInTable(tableName).getOnlineRegions(htd.getTableName())) {
@@ -103,7 +103,7 @@ public class TestEncryptionRandomKeying {
TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
// Create a store file
- Table table = new HTable(conf, htd.getName());
+ Table table = new HTable(conf, htd.getTableName());
try {
table.put(new Put(Bytes.toBytes("testrow"))
.add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
@@ -121,7 +121,7 @@ public class TestEncryptionRandomKeying {
@Test
public void testRandomKeying() throws Exception {
// Verify we have store file(s) with a random key
- final List<Path> initialPaths = findStorefilePaths(htd.getName());
+ final List<Path> initialPaths = findStorefilePaths(htd.getTableName());
assertTrue(initialPaths.size() > 0);
for (Path path: initialPaths) {
assertNotNull("Store file " + path + " is not encrypted", extractHFileKey(path));
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index 91c62ed..0e94e68 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -133,9 +133,9 @@ public class TestHRegionServerBulkLoad {
public static class AtomicHFileLoader extends RepeatingTestThread {
final AtomicLong numBulkLoads = new AtomicLong();
final AtomicLong numCompactions = new AtomicLong();
- private String tableName;
+ private TableName tableName;
- public AtomicHFileLoader(String tableName, TestContext ctx,
+ public AtomicHFileLoader(TableName tableName, TestContext ctx,
byte targetFamilies[][]) throws IOException {
super(ctx);
this.tableName = tableName;
@@ -160,9 +160,8 @@ public class TestHRegionServerBulkLoad {
// bulk load HFiles
final HConnection conn = UTIL.getHBaseAdmin().getConnection();
- TableName tbl = TableName.valueOf(tableName);
RegionServerCallable<Void> callable =
- new RegionServerCallable<Void>(conn, tbl, Bytes.toBytes("aaa")) {
+ new RegionServerCallable<Void>(conn, tableName, Bytes.toBytes("aaa")) {
@Override
public Void call(int callTimeout) throws Exception {
LOG.debug("Going to connect to server " + getLocation() + " for row "
@@ -181,7 +180,7 @@ public class TestHRegionServerBulkLoad {
// Periodically do compaction to reduce the number of open file handles.
if (numBulkLoads.get() % 10 == 0) {
// 10 * 50 = 500 open file handles!
- callable = new RegionServerCallable<Void>(conn, tbl, Bytes.toBytes("aaa")) {
+ callable = new RegionServerCallable<Void>(conn, tableName, Bytes.toBytes("aaa")) {
@Override
public Void call(int callTimeout) throws Exception {
LOG.debug("compacting " + getLocation() + " for row "
@@ -210,9 +209,9 @@ public class TestHRegionServerBulkLoad {
HTable table;
AtomicLong numScans = new AtomicLong();
AtomicLong numRowsScanned = new AtomicLong();
- String TABLE_NAME;
+ TableName TABLE_NAME;
- public AtomicScanReader(String TABLE_NAME, TestContext ctx,
+ public AtomicScanReader(TableName TABLE_NAME, TestContext ctx,
byte targetFamilies[][]) throws IOException {
super(ctx);
this.TABLE_NAME = TABLE_NAME;
@@ -264,10 +263,10 @@ public class TestHRegionServerBulkLoad {
* Creates a table with the given name and the specified number of column
* families, if the table does not already exist.
*/
- private void setupTable(String table, int cfs) throws IOException {
+ private void setupTable(TableName table, int cfs) throws IOException {
try {
LOG.info("Creating table " + table);
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
+ HTableDescriptor htd = new HTableDescriptor(table);
for (int i = 0; i < 10; i++) {
htd.addFamily(new HColumnDescriptor(family(i)));
}
@@ -283,7 +282,7 @@ public class TestHRegionServerBulkLoad {
*/
@Test
public void testAtomicBulkLoad() throws Exception {
- String TABLE_NAME = "atomicBulkLoad";
+ TableName TABLE_NAME = TableName.valueOf("atomicBulkLoad");
int millisToRun = 30000;
int numScanners = 50;
@@ -296,7 +295,7 @@ public class TestHRegionServerBulkLoad {
}
}
- void runAtomicBulkloadTest(String tableName, int millisToRun, int numScanners)
+ void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners)
throws Exception {
setupTable(tableName, 10);
@@ -336,7 +335,7 @@ public class TestHRegionServerBulkLoad {
Configuration c = HBaseConfiguration.create();
TestHRegionServerBulkLoad test = new TestHRegionServerBulkLoad();
test.setConf(c);
- test.runAtomicBulkloadTest("atomicTableTest", 5 * 60 * 1000, 50);
+ test.runAtomicBulkloadTest(TableName.valueOf("atomicTableTest"), 5 * 60 * 1000, 50);
} finally {
System.exit(0); // something hangs (believed to be the LRU threadpool)
}
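Typing the loader's field as TableName also deletes the per-iteration TableName.valueOf(tableName) conversion: the field is handed straight to RegionServerCallable. Roughly, as a sketch built only from the signatures visible in the hunks above:

    // Sketch only; conn and tableName are the connection and the TableName
    // field from the hunks above, not new identifiers.
    RegionServerCallable<Void> callable =
        new RegionServerCallable<Void>(conn, tableName, Bytes.toBytes("aaa")) {
          @Override
          public Void call(int callTimeout) throws Exception {
            // issue the bulk load (or the periodic compaction) via getLocation()
            return null;
          }
        };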
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
index 9388d5f..6e4030b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
@@ -77,7 +77,7 @@ public class TestRegionReplicas {
@BeforeClass
public static void before() throws Exception {
HTU.startMiniCluster(NB_SERVERS);
- final byte[] tableName = Bytes.toBytes(TestRegionReplicas.class.getSimpleName());
+ final TableName tableName = TableName.valueOf(TestRegionReplicas.class.getSimpleName());
// Create table then get the single region for our new table.
table = HTU.createTable(tableName, f);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 36baa05..3ae82ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -89,7 +89,7 @@ public class TestRegionServerMetrics {
String regionMetricsKey = "regionCount";
long regions = metricsHelper.getGaugeLong(regionMetricsKey, serverSource);
// Creating a table should add one region
- TEST_UTIL.createTable(Bytes.toBytes("table"), Bytes.toBytes("cf"));
+ TEST_UTIL.createTable(TableName.valueOf("table"), Bytes.toBytes("cf"));
metricsHelper.assertGaugeGt(regionMetricsKey, regions, serverSource);
}
@@ -101,7 +101,7 @@ public class TestRegionServerMetrics {
@Test
public void testRequestCount() throws Exception {
String tableNameString = "testRequestCount";
- byte[] tName = Bytes.toBytes(tableNameString);
+ TableName tName = TableName.valueOf(tableNameString);
byte[] cfName = Bytes.toBytes("d");
byte[] row = Bytes.toBytes("rk");
byte[] qualifier = Bytes.toBytes("qual");
@@ -181,7 +181,7 @@ public class TestRegionServerMetrics {
@Test
public void testMutationsWithoutWal() throws Exception {
- byte[] tableName = Bytes.toBytes("testMutationsWithoutWal");
+ TableName tableName = TableName.valueOf("testMutationsWithoutWal");
byte[] cf = Bytes.toBytes("d");
byte[] row = Bytes.toBytes("rk");
byte[] qualifier = Bytes.toBytes("qual");
@@ -240,7 +240,7 @@ public class TestRegionServerMetrics {
@Test
public void testCheckAndPutCount() throws Exception {
String tableNameString = "testCheckAndPutCount";
- byte[] tableName = Bytes.toBytes(tableNameString);
+ TableName tableName = TableName.valueOf(tableNameString);
byte[] cf = Bytes.toBytes("d");
byte[] row = Bytes.toBytes("rk");
byte[] qualifier = Bytes.toBytes("qual");
@@ -276,7 +276,7 @@ public class TestRegionServerMetrics {
@Test
public void testIncrement() throws Exception {
String tableNameString = "testIncrement";
- byte[] tableName = Bytes.toBytes(tableNameString);
+ TableName tableName = TableName.valueOf(tableNameString);
byte[] cf = Bytes.toBytes("d");
byte[] row = Bytes.toBytes("rk");
byte[] qualifier = Bytes.toBytes("qual");
@@ -308,7 +308,7 @@ public class TestRegionServerMetrics {
@Test
public void testAppend() throws Exception {
String tableNameString = "testAppend";
- byte[] tableName = Bytes.toBytes(tableNameString);
+ TableName tableName = TableName.valueOf(tableNameString);
byte[] cf = Bytes.toBytes("d");
byte[] row = Bytes.toBytes("rk");
byte[] qualifier = Bytes.toBytes("qual");
@@ -340,7 +340,7 @@ public class TestRegionServerMetrics {
@Test
public void testScanNext() throws IOException {
String tableNameString = "testScanNext";
- byte[] tableName = Bytes.toBytes(tableNameString);
+ TableName tableName = TableName.valueOf(tableNameString);
byte[] cf = Bytes.toBytes("d");
byte[] qualifier = Bytes.toBytes("qual");
byte[] val = Bytes.toBytes("One");
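The metrics tests keep the raw String, since it doubles as a plain label, and derive the typed name from it once. A short sketch of that middle ground (assuming the test-utility createTable(TableName, byte[]) overload used earlier in this diff):

    // Sketch; keeps the String where a label is needed, the TableName for
    // every client-facing call.
    String tableNameString = "testRequestCount";               // label form
    TableName tableName = TableName.valueOf(tableNameString);  // typed form
    Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("d"));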
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index 8c7d36e..24d9dd4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.master.HMaster;
@@ -37,7 +38,6 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest
import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
@@ -70,10 +70,10 @@ public class TestRegionServerNoMaster {
@BeforeClass
public static void before() throws Exception {
HTU.startMiniCluster(NB_SERVERS);
- final byte[] tableName = Bytes.toBytes(TestRegionServerNoMaster.class.getSimpleName());
+ final TableName tableName = TableName.valueOf(TestRegionServerNoMaster.class.getSimpleName());
// Create table then get the single region for our new table.
- table = HTU.createTable(tableName, HConstants.CATALOG_FAMILY);
+ table = HTU.createTable(tableName, HConstants.CATALOG_FAMILY);
Put p = new Put(row);
p.add(HConstants.CATALOG_FAMILY, row, row);
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
index 1a2fc63..5593d80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -59,7 +60,7 @@ public class TestScannerWithBulkload {
TEST_UTIL.startMiniCluster(1);
}
- private static void createTable(Admin admin, String tableName) throws IOException {
+ private static void createTable(Admin admin, TableName tableName) throws IOException {
HTableDescriptor desc = new HTableDescriptor(tableName);
HColumnDescriptor hcd = new HColumnDescriptor("col");
hcd.setMaxVersions(3);
@@ -69,7 +70,7 @@ public class TestScannerWithBulkload {
@Test
public void testBulkLoad() throws Exception {
- String tableName = "testBulkLoad";
+ TableName tableName = TableName.valueOf("testBulkLoad");
long l = System.currentTimeMillis();
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
createTable(admin, tableName);
@@ -164,7 +165,7 @@ public class TestScannerWithBulkload {
return hfilePath;
}
- private HTable init(HBaseAdmin admin, long l, Scan scan, String tableName) throws Exception {
+ private HTable init(HBaseAdmin admin, long l, Scan scan, TableName tableName) throws Exception {
HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);
Put put0 = new Put(Bytes.toBytes("row1"));
put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l, Bytes
@@ -198,7 +199,7 @@ public class TestScannerWithBulkload {
@Test
public void testBulkLoadWithParallelScan() throws Exception {
- String tableName = "testBulkLoadWithParallelScan";
+ TableName tableName = TableName.valueOf("testBulkLoadWithParallelScan");
final long l = System.currentTimeMillis();
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
createTable(admin, tableName);
@@ -240,7 +241,7 @@ public class TestScannerWithBulkload {
@Test
public void testBulkLoadNativeHFile() throws Exception {
- String tableName = "testBulkLoadNativeHFile";
+ TableName tableName = TableName.valueOf("testBulkLoadNativeHFile");
long l = System.currentTimeMillis();
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
createTable(admin, tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index 158f05e..881699d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.ServerName;
@@ -134,7 +135,7 @@ public class TestServerCustomProtocol {
}
}
- private static final byte[] TEST_TABLE = Bytes.toBytes("test");
+ private static final TableName TEST_TABLE = TableName.valueOf("test");
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] ROW_A = Bytes.toBytes("aaa");
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 45dd273..cde1c6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -177,10 +177,10 @@ public class TestSplitTransactionOnCluster {
TableName.valueOf("testRITStateForRollback");
try {
// Create table then get the single region for our new table.
- Table t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf"));
+ Table t = createTableAndWait(tableName, Bytes.toBytes("cf"));
final List<HRegion> regions = cluster.getRegions(tableName);
final HRegionInfo hri = getAndCheckSingleTableRegion(regions);
- insertData(tableName.getName(), admin, t);
+ insertData(tableName, admin, t);
t.close();
// Turn off balancer so it doesn't cut in and mess up our placements.
@@ -219,7 +219,7 @@ public class TestSplitTransactionOnCluster {
}
@Test(timeout = 60000)
public void testSplitFailedCompactionAndSplit() throws Exception {
- final byte[] tableName = Bytes.toBytes("testSplitFailedCompactionAndSplit");
+ final TableName tableName = TableName.valueOf("testSplitFailedCompactionAndSplit");
Configuration conf = TESTING_UTIL.getConfiguration();
HBaseAdmin admin = new HBaseAdmin(conf);
// Create table then get the single region for our new table.
@@ -284,8 +284,8 @@ public class TestSplitTransactionOnCluster {
*/
@Test (timeout = 300000) public void testRSSplitDaughtersAreOnlinedAfterShutdownHandling()
throws IOException, InterruptedException, ServiceException {
- final byte [] tableName =
- Bytes.toBytes("testRSSplitDaughtersAreOnlinedAfterShutdownHandling");
+ final TableName tableName =
+ TableName.valueOf("testRSSplitDaughtersAreOnlinedAfterShutdownHandling");
// Create table then get the single region for our new table.
HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
@@ -329,8 +329,8 @@ public class TestSplitTransactionOnCluster {
@Test (timeout = 300000) public void testExistingZnodeBlocksSplitAndWeRollback()
throws IOException, InterruptedException, NodeExistsException, KeeperException, ServiceException {
- final byte [] tableName =
- Bytes.toBytes("testExistingZnodeBlocksSplitAndWeRollback");
+ final TableName tableName =
+ TableName.valueOf("testExistingZnodeBlocksSplitAndWeRollback");
// Create table then get the single region for our new table.
HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
@@ -387,8 +387,8 @@ public class TestSplitTransactionOnCluster {
*/
@Test (timeout=300000) public void testShutdownFixupWhenDaughterHasSplit()
throws IOException, InterruptedException {
- final byte [] tableName =
- Bytes.toBytes("testShutdownFixupWhenDaughterHasSplit");
+ final TableName tableName =
+ TableName.valueOf("testShutdownFixupWhenDaughterHasSplit");
// Create table then get the single region for our new table.
HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
@@ -554,7 +554,8 @@ public class TestSplitTransactionOnCluster {
public void testMasterRestartAtRegionSplitPendingCatalogJanitor()
throws IOException, InterruptedException, NodeExistsException,
KeeperException, ServiceException {
- final byte[] tableName = Bytes.toBytes("testMasterRestartAtRegionSplitPendingCatalogJanitor");
+ final TableName tableName = TableName
+ .valueOf("testMasterRestartAtRegionSplitPendingCatalogJanitor");
// Create table then get the single region for our new table.
HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
@@ -622,13 +623,13 @@ public class TestSplitTransactionOnCluster {
final TableName tableName =
TableName.valueOf("testTableExistsIfTheSpecifiedTableRegionIsSplitParent");
// Create table then get the single region for our new table.
- Table t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf"));
+ Table t = createTableAndWait(tableName, Bytes.toBytes("cf"));
List<HRegion> regions = null;
try {
regions = cluster.getRegions(tableName);
int regionServerIndex = cluster.getServerWith(regions.get(0).getRegionName());
HRegionServer regionServer = cluster.getRegionServer(regionServerIndex);
- insertData(tableName.getName(), admin, t);
+ insertData(tableName, admin, t);
// Turn off balancer so it doesn't cut in and mess up our placements.
admin.setBalancerRunning(false, true);
// Turn off the meta scanner so it doesn't remove the parent on us.
@@ -674,7 +675,7 @@ public class TestSplitTransactionOnCluster {
try {
int regionServerIndex = cluster.getServerWith(oldRegions.get(0).getRegionName());
HRegionServer regionServer = cluster.getRegionServer(regionServerIndex);
- insertData(tableName.getName(), admin, t);
+ insertData(tableName, admin, t);
// Turn off balancer so it doesn't cut in and mess up our placements.
admin.setBalancerRunning(false, true);
// Turn off the meta scanner so it doesn't remove the parent on us.
@@ -731,7 +732,7 @@ public class TestSplitTransactionOnCluster {
}
}
- private void insertData(final byte[] tableName, HBaseAdmin admin, Table t) throws IOException,
+ private void insertData(final TableName tableName, HBaseAdmin admin, Table t) throws IOException,
InterruptedException {
Put p = new Put(Bytes.toBytes("row1"));
p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
@@ -758,7 +759,7 @@ public class TestSplitTransactionOnCluster {
final TableName tableName =
TableName.valueOf("testSplitRegionWithNoStoreFiles");
// Create table then get the single region for our new table.
- createTableAndWait(tableName.getName(), HConstants.CATALOG_FAMILY);
+ createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
List<HRegion> regions = cluster.getRegions(tableName);
HRegionInfo hri = getAndCheckSingleTableRegion(regions);
ensureTableRegionNotOnSameServerAsMeta(admin, hri);
@@ -841,7 +842,7 @@ public class TestSplitTransactionOnCluster {
LOG.info("Starting testSplitAndRestartingMaster");
final TableName tableName = TableName.valueOf("testSplitAndRestartingMaster");
// Create table then get the single region for our new table.
- createTableAndWait(tableName.getName(), HConstants.CATALOG_FAMILY);
+ createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
List<HRegion> regions = cluster.getRegions(tableName);
HRegionInfo hri = getAndCheckSingleTableRegion(regions);
ensureTableRegionNotOnSameServerAsMeta(admin, hri);
@@ -880,19 +881,19 @@ public class TestSplitTransactionOnCluster {
@Test(timeout = 180000)
public void testSplitHooksBeforeAndAfterPONR() throws Exception {
- String firstTable = "testSplitHooksBeforeAndAfterPONR_1";
- String secondTable = "testSplitHooksBeforeAndAfterPONR_2";
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(firstTable));
+ TableName firstTable = TableName.valueOf("testSplitHooksBeforeAndAfterPONR_1");
+ TableName secondTable = TableName.valueOf("testSplitHooksBeforeAndAfterPONR_2");
+ HTableDescriptor desc = new HTableDescriptor(firstTable);
desc.addCoprocessor(MockedRegionObserver.class.getName());
HColumnDescriptor hcd = new HColumnDescriptor("cf");
desc.addFamily(hcd);
admin.createTable(desc);
- desc = new HTableDescriptor(TableName.valueOf(secondTable));
+ desc = new HTableDescriptor(secondTable);
hcd = new HColumnDescriptor("cf");
desc.addFamily(hcd);
admin.createTable(desc);
- List<HRegion> firstTableregions = cluster.getRegions(TableName.valueOf(firstTable));
- List<HRegion> secondTableRegions = cluster.getRegions(TableName.valueOf(secondTable));
+ List<HRegion> firstTableregions = cluster.getRegions(firstTable);
+ List<HRegion> secondTableRegions = cluster.getRegions(secondTable);
ServerName serverName =
cluster.getServerHoldingRegion(firstTableregions.get(0).getRegionName());
admin.move(secondTableRegions.get(0).getRegionInfo().getEncodedNameAsBytes(),
@@ -902,16 +903,16 @@ public class TestSplitTransactionOnCluster {
try {
table1 = new HTable(TESTING_UTIL.getConfiguration(), firstTable);
table2 = new HTable(TESTING_UTIL.getConfiguration(), secondTable);
- insertData(Bytes.toBytes(firstTable), admin, table1);
- insertData(Bytes.toBytes(secondTable), admin, table2);
- admin.split(Bytes.toBytes(firstTable), "row2".getBytes());
- firstTableregions = cluster.getRegions(Bytes.toBytes(firstTable));
+ insertData(firstTable, admin, table1);
+ insertData(secondTable, admin, table2);
+ admin.split(firstTable, "row2".getBytes());
+ firstTableregions = cluster.getRegions(firstTable);
while (firstTableregions.size() != 2) {
Thread.sleep(1000);
- firstTableregions = cluster.getRegions(Bytes.toBytes(firstTable));
+ firstTableregions = cluster.getRegions(firstTable);
}
assertEquals("Number of regions after split should be 2.", 2, firstTableregions.size());
- secondTableRegions = cluster.getRegions(Bytes.toBytes(secondTable));
+ secondTableRegions = cluster.getRegions(secondTable);
assertEquals("Number of regions after split should be 2.", 2, secondTableRegions.size());
} finally {
if (table1 != null) {
@@ -926,7 +927,7 @@ public class TestSplitTransactionOnCluster {
}
private void testSplitBeforeSettingSplittingInZKInternals() throws Exception {
- final byte[] tableName = Bytes.toBytes("testSplitBeforeSettingSplittingInZK");
+ final TableName tableName = TableName.valueOf("testSplitBeforeSettingSplittingInZK");
try {
// Create table then get the single region for our new table.
createTableAndWait(tableName, Bytes.toBytes("cf"));
@@ -1001,7 +1002,7 @@ public class TestSplitTransactionOnCluster {
return null;
}
- private List<HRegion> checkAndGetDaughters(byte[] tableName)
+ private List<HRegion> checkAndGetDaughters(TableName tableName)
throws InterruptedException {
List<HRegion> daughters = null;
// try up to 10s
@@ -1126,7 +1127,7 @@ public class TestSplitTransactionOnCluster {
|| cluster.getLiveRegionServerThreads().size() > NB_SERVERS);
}
- private void awaitDaughters(byte[] tableName, int numDaughters) throws InterruptedException {
+ private void awaitDaughters(TableName tableName, int numDaughters) throws InterruptedException {
// Wait till the regions are back online.
for (int i=0; cluster.getRegions(tableName).size() < numDaughters && i<60; i++) {
LOG.info("Waiting for repair to happen");
@@ -1137,7 +1138,7 @@ public class TestSplitTransactionOnCluster {
}
}
- private List<HRegion> awaitTableRegions(final byte[] tableName) throws InterruptedException {
+ private List<HRegion> awaitTableRegions(final TableName tableName) throws InterruptedException {
List<HRegion> regions = null;
for (int i = 0; i < 100; i++) {
regions = cluster.getRegions(tableName);
@@ -1147,11 +1148,11 @@ public class TestSplitTransactionOnCluster {
return regions;
}
- private HTable createTableAndWait(byte[] tableName, byte[] cf) throws IOException,
+ private HTable createTableAndWait(TableName tableName, byte[] cf) throws IOException,
InterruptedException {
HTable t = TESTING_UTIL.createTable(tableName, cf);
awaitTableRegions(tableName);
- assertTrue("Table not online: " + Bytes.toString(tableName),
+ assertTrue("Table not online: " + tableName,
cluster.getRegions(tableName).size() != 0);
return t;
}
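With the private helpers here (createTableAndWait, insertData, awaitDaughters, awaitTableRegions) now taking TableName, callers drop the tableName.getName()/Bytes.toBytes(...) round-trips entirely. A typical caller now reads roughly as follows (the table name is a placeholder, not one from the patch):

    final TableName tableName = TableName.valueOf("testSomeSplit"); // placeholder name
    HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY);
    insertData(tableName, admin, t);
    admin.split(tableName, Bytes.toBytes("row2"));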
http://git-wip-us.apache.org/repos/asf/hbase/blob/6189b52f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
index 9b7ce0e..9bcdd82 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
@@ -126,15 +126,15 @@ public class TestLogRollAbort {
TableName.META_TABLE_NAME).close();
// Create the test table and open it
- String tableName = this.getClass().getSimpleName();
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+ TableName tableName = TableName.valueOf(this.getClass().getSimpleName());
+ HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
admin.createTable(desc);
- Table table = new HTable(TEST_UTIL.getConfiguration(), tableName);
+ Table table = new HTable(TEST_UTIL.getConfiguration(), desc.getTableName());
try {
- HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName));
+ HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(tableName);
HLog log = server.getWAL();
assertTrue("Need HDFS-826 for this test", ((FSHLog) log).canGetCurReplicas());