You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2013/08/08 08:08:31 UTC
svn commit: r1511591 [17/23] - in /hbase/branches/0.95:
hbase-client/src/main/java/org/apache/hadoop/hbase/
hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/
hbase-client/src/main/java/org/apache/hadoop/hbase/client/
hbase-client/src/main/jav...
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java Thu Aug 8 06:08:23 2013
@@ -27,6 +27,7 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.constraint.TestConstraint.CheckWasRunConstraint;
import org.apache.hadoop.hbase.constraint.WorksConstraint.NameConstraint;
@@ -43,7 +44,7 @@ public class TestConstraints {
@SuppressWarnings("unchecked")
@Test
public void testSimpleReadWrite() throws Throwable {
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(desc, WorksConstraint.class);
List<? extends Constraint> constraints = Constraints.getConstraints(desc,
@@ -68,7 +69,7 @@ public class TestConstraints {
@SuppressWarnings("unchecked")
@Test
public void testReadWriteWithConf() throws Throwable {
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(
desc,
new Pair<Class<? extends Constraint>, Configuration>(
@@ -101,7 +102,7 @@ public class TestConstraints {
@SuppressWarnings("unchecked")
@Test
public void testEnableDisableRemove() throws Exception {
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
// check general enabling/disabling of constraints
// first add a constraint
Constraints.add(desc, AllPassConstraint.class);
@@ -137,7 +138,7 @@ public class TestConstraints {
@SuppressWarnings("unchecked")
@Test
public void testUpdateConstraint() throws Exception {
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(desc, CheckConfigurationConstraint.class,
CheckWasRunConstraint.class);
Constraints.setConfiguration(desc, CheckConfigurationConstraint.class,
@@ -163,7 +164,7 @@ public class TestConstraints {
*/
@Test
public void testRemoveUnsetConstraint() throws Throwable {
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.remove(desc);
Constraints.remove(desc, AlsoWorks.class);
}
@@ -173,7 +174,7 @@ public class TestConstraints {
Configuration conf = new Configuration();
conf.setBoolean("_ENABLED", false);
conf.setLong("_PRIORITY", 10);
- HTableDescriptor desc = new HTableDescriptor("table");
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(desc, AlsoWorks.class, conf);
Constraints.add(desc, WorksConstraint.class);
assertFalse(Constraints.enabled(desc, AlsoWorks.class));
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java Thu Aug 8 06:08:23 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.util.Bytes;
/**
* Class for testing WALObserver coprocessor.
@@ -86,7 +87,7 @@ implements WALObserver {
HRegionInfo info, HLogKey logKey, WALEdit logEdit) throws IOException {
boolean bypass = false;
// check table name matches or not.
- if (!Arrays.equals(HRegionInfo.getTableName(info.getRegionName()), this.tableName)) {
+ if (!Bytes.equals(info.getTableName().getName(), this.tableName)) {
return bypass;
}
preWALWriteCalled = true;
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java Thu Aug 8 06:08:23 2013
@@ -27,7 +27,6 @@ import static org.junit.Assert.assertTru
import java.io.IOException;
import java.util.List;
import java.util.Map;
-import java.util.Arrays;
import java.util.NavigableSet;
import java.util.concurrent.atomic.AtomicInteger;
@@ -289,7 +288,7 @@ public class SimpleRegionObserver extend
assertNotNull(e.getRegion());
assertNotNull(get);
assertNotNull(results);
- if (Arrays.equals(e.getRegion().getTableDesc().getName(),
+ if (e.getRegion().getTableDesc().getTableName().equals(
TestRegionObserverInterface.TEST_TABLE)) {
boolean foundA = false;
boolean foundB = false;
@@ -321,7 +320,7 @@ public class SimpleRegionObserver extend
assertNotNull(e);
assertNotNull(e.getRegion());
assertNotNull(familyMap);
- if (Arrays.equals(e.getRegion().getTableDesc().getName(),
+ if (e.getRegion().getTableDesc().getTableName().equals(
TestRegionObserverInterface.TEST_TABLE)) {
List<? extends Cell> cells = familyMap.get(TestRegionObserverInterface.A);
assertNotNull(cells);
@@ -355,7 +354,7 @@ public class SimpleRegionObserver extend
assertNotNull(e.getRegion());
assertNotNull(familyMap);
List<? extends Cell> cells = familyMap.get(TestRegionObserverInterface.A);
- if (Arrays.equals(e.getRegion().getTableDesc().getName(),
+ if (e.getRegion().getTableDesc().getTableName().equals(
TestRegionObserverInterface.TEST_TABLE)) {
assertNotNull(cells);
assertNotNull(cells.get(0));
@@ -471,7 +470,7 @@ public class SimpleRegionObserver extend
RegionCoprocessorEnvironment e = ctx.getEnvironment();
assertNotNull(e);
assertNotNull(e.getRegion());
- if (Arrays.equals(e.getRegion().getTableDesc().getName(),
+ if (e.getRegion().getTableDesc().getTableName().equals(
TestRegionObserverInterface.TEST_TABLE)) {
assertNotNull(familyPaths);
assertEquals(1,familyPaths.size());
@@ -489,7 +488,7 @@ public class SimpleRegionObserver extend
RegionCoprocessorEnvironment e = ctx.getEnvironment();
assertNotNull(e);
assertNotNull(e.getRegion());
- if (Arrays.equals(e.getRegion().getTableDesc().getName(),
+ if (e.getRegion().getTableDesc().getTableName().equals(
TestRegionObserverInterface.TEST_TABLE)) {
assertNotNull(familyPaths);
assertEquals(1,familyPaths.size());
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java Thu Aug 8 06:08:23 2013
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.RegionLoa
import java.io.*;
import java.util.*;
-import java.util.jar.*;
import org.junit.*;
import org.junit.experimental.categories.Category;
@@ -139,7 +138,7 @@ public class TestClassLoading {
LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS2);
// create a table that references the coprocessors
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
htd.addFamily(new HColumnDescriptor("test"));
// without configuration values
htd.setValue("COPROCESSOR$1", jarFileOnHDFS1.toString() + "|" + cpName1 +
@@ -161,7 +160,7 @@ public class TestClassLoading {
byte[] startKey = {10, 63};
byte[] endKey = {12, 43};
admin.createTable(htd, startKey, endKey, 4);
- waitForTable(htd.getName());
+ waitForTable(htd.getTableName());
// verify that the coprocessors were loaded
boolean foundTableRegion=false;
@@ -233,13 +232,13 @@ public class TestClassLoading {
File jarFile = buildCoprocessorJar(cpName3);
// create a table that references the jar
- HTableDescriptor htd = new HTableDescriptor(cpName3);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(cpName3));
htd.addFamily(new HColumnDescriptor("test"));
htd.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName3 + "|" +
Coprocessor.PRIORITY_USER);
HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
admin.createTable(htd);
- waitForTable(htd.getName());
+ waitForTable(htd.getTableName());
// verify that the coprocessor was loaded
boolean found = false;
@@ -259,13 +258,13 @@ public class TestClassLoading {
File jarFile = buildCoprocessorJar(cpName4);
// create a table that references the jar
- HTableDescriptor htd = new HTableDescriptor(cpName4);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(cpName4));
htd.addFamily(new HColumnDescriptor("test"));
htd.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName4 + "|" +
Coprocessor.PRIORITY_USER);
HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
admin.createTable(htd);
- waitForTable(htd.getName());
+ waitForTable(htd.getTableName());
// verify that the coprocessor was loaded correctly
boolean found = false;
@@ -307,7 +306,7 @@ public class TestClassLoading {
" | org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver | | k=v ";
// create a table that references the jar
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
htd.addFamily(new HColumnDescriptor("test"));
// add 3 coprocessors by setting htd attributes directly.
@@ -333,7 +332,7 @@ public class TestClassLoading {
admin.deleteTable(tableName);
}
admin.createTable(htd);
- waitForTable(htd.getName());
+ waitForTable(htd.getTableName());
// verify that the coprocessor was loaded
boolean found_2 = false, found_1 = false, found_3 = false,
@@ -409,7 +408,7 @@ public class TestClassLoading {
LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);
// create a table that references the coprocessors
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
htd.addFamily(new HColumnDescriptor("test"));
// without configuration values
htd.setValue("COPROCESSOR$1", jarFileOnHDFS.toString() + "|" + cpName1 +
@@ -425,7 +424,7 @@ public class TestClassLoading {
admin.deleteTable(tableName);
}
admin.createTable(htd);
- waitForTable(htd.getName());
+ waitForTable(htd.getTableName());
// verify that the coprocessors were loaded
boolean found1 = false, found2 = false, found2_k1 = false,
@@ -542,9 +541,9 @@ public class TestClassLoading {
assertEquals(loadedMasterCoprocessorsVerify, loadedMasterCoprocessors);
}
- private void waitForTable(byte[] name) throws InterruptedException, IOException {
+ private void waitForTable(TableName name) throws InterruptedException, IOException {
// First wait until all regions are online
- TEST_UTIL.waitTableEnabled(name);
+ TEST_UTIL.waitTableEnabled(name.getName());
// Now wait a bit longer for the coprocessor hosts to load the CPs
Thread.sleep(1000);
}
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java Thu Aug 8 06:08:23 2013
@@ -29,6 +29,7 @@ import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -65,7 +66,8 @@ import static org.junit.Assert.fail;
public class TestCoprocessorEndpoint {
private static final Log LOG = LogFactory.getLog(TestCoprocessorEndpoint.class);
- private static final byte[] TEST_TABLE = Bytes.toBytes("TestTable");
+ private static final TableName TEST_TABLE =
+ TableName.valueOf("TestTable");
private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
private static final byte[] TEST_QUALIFIER = Bytes.toBytes("TestQualifier");
private static byte[] ROW = Bytes.toBytes("testRow");
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java Thu Aug 8 06:08:23 2013
@@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -52,7 +53,6 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.SplitTransaction;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
@@ -263,7 +263,8 @@ public class TestCoprocessorInterface ex
}
public void testSharedData() throws IOException {
- byte [] tableName = Bytes.toBytes("testtable");
+ TableName tableName =
+ TableName.valueOf("testtable");
byte [][] families = { fam1, fam2, fam3 };
Configuration hc = initSplit();
@@ -340,7 +341,8 @@ public class TestCoprocessorInterface ex
}
public void testCoprocessorInterface() throws IOException {
- byte [] tableName = Bytes.toBytes("testtable");
+ TableName tableName =
+ TableName.valueOf("testtable");
byte [][] families = { fam1, fam2, fam3 };
Configuration hc = initSplit();
@@ -417,7 +419,7 @@ public class TestCoprocessorInterface ex
return r;
}
- HRegion initHRegion (byte [] tableName, String callingMethod,
+ HRegion initHRegion (TableName tableName, String callingMethod,
Configuration conf, Class<?> [] implClasses, byte [][] families)
throws IOException {
HTableDescriptor htd = new HTableDescriptor(tableName);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java Thu Aug 8 06:08:23 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -80,7 +81,7 @@ public class TestMasterCoprocessorExcept
public void run() {
// create a table : master coprocessor will throw an exception and not
// catch it.
- HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TEST_TABLE));
htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
try {
HBaseAdmin admin = UTIL.getHBaseAdmin();
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java Thu Aug 8 06:08:23 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -183,7 +184,7 @@ public class TestMasterCoprocessorExcept
BuggyMasterObserver.class.getName();
assertTrue(master.getLoadedCoprocessors().contains(coprocessorName));
- HTableDescriptor htd1 = new HTableDescriptor(TEST_TABLE1);
+ HTableDescriptor htd1 = new HTableDescriptor(TableName.valueOf(TEST_TABLE1));
htd1.addFamily(new HColumnDescriptor(TEST_FAMILY1));
boolean threwDNRE = false;
@@ -215,7 +216,7 @@ public class TestMasterCoprocessorExcept
// Verify that BuggyMasterObserver has been removed due to its misbehavior
// by creating another table: should not have a problem this time.
- HTableDescriptor htd2 = new HTableDescriptor(TEST_TABLE2);
+ HTableDescriptor htd2 = new HTableDescriptor(TableName.valueOf(TEST_TABLE2));
htd2.addFamily(new HColumnDescriptor(TEST_FAMILY2));
HBaseAdmin admin = UTIL.getHBaseAdmin();
try {
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java Thu Aug 8 06:08:23 2013
@@ -73,6 +73,12 @@ public class TestMasterObserver {
private boolean postDeleteTableCalled;
private boolean preModifyTableCalled;
private boolean postModifyTableCalled;
+ private boolean preCreateNamespaceCalled;
+ private boolean postCreateNamespaceCalled;
+ private boolean preDeleteNamespaceCalled;
+ private boolean postDeleteNamespaceCalled;
+ private boolean preModifyNamespaceCalled;
+ private boolean postModifyNamespaceCalled;
private boolean preAddColumnCalled;
private boolean postAddColumnCalled;
private boolean preModifyColumnCalled;
@@ -138,6 +144,12 @@ public class TestMasterObserver {
postDeleteTableCalled = false;
preModifyTableCalled = false;
postModifyTableCalled = false;
+ preCreateNamespaceCalled = false;
+ postCreateNamespaceCalled = false;
+ preDeleteNamespaceCalled = false;
+ postDeleteNamespaceCalled = false;
+ preModifyNamespaceCalled = false;
+ postModifyNamespaceCalled = false;
preAddColumnCalled = false;
postAddColumnCalled = false;
preModifyColumnCalled = false;
@@ -215,7 +227,7 @@ public class TestMasterObserver {
@Override
public void preDeleteTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
if (bypass) {
env.bypass();
}
@@ -224,7 +236,7 @@ public class TestMasterObserver {
@Override
public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
postDeleteTableCalled = true;
}
@@ -238,7 +250,7 @@ public class TestMasterObserver {
@Override
public void preModifyTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HTableDescriptor htd) throws IOException {
+ TableName tableName, HTableDescriptor htd) throws IOException {
if (bypass) {
env.bypass();
}else{
@@ -249,7 +261,7 @@ public class TestMasterObserver {
@Override
public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HTableDescriptor htd) throws IOException {
+ TableName tableName, HTableDescriptor htd) throws IOException {
postModifyTableCalled = true;
}
@@ -262,8 +274,77 @@ public class TestMasterObserver {
}
@Override
+ public void preCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ NamespaceDescriptor ns) throws IOException {
+ if (bypass) {
+ env.bypass();
+ }
+ preCreateNamespaceCalled = true;
+ }
+
+ @Override
+ public void postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ NamespaceDescriptor ns) throws IOException {
+ postCreateNamespaceCalled = true;
+ }
+
+ public boolean wasCreateNamespaceCalled() {
+ return preCreateNamespaceCalled && postCreateNamespaceCalled;
+ }
+
+ public boolean preCreateNamespaceCalledOnly() {
+ return preCreateNamespaceCalled && !postCreateNamespaceCalled;
+ }
+
+ @Override
+ public void preDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ String name) throws IOException {
+ if (bypass) {
+ env.bypass();
+ }
+ preDeleteNamespaceCalled = true;
+ }
+
+ @Override
+ public void postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ String name) throws IOException {
+ postDeleteNamespaceCalled = true;
+ }
+
+ public boolean wasDeleteNamespaceCalled() {
+ return preDeleteNamespaceCalled && postDeleteNamespaceCalled;
+ }
+
+ public boolean preDeleteNamespaceCalledOnly() {
+ return preDeleteNamespaceCalled && !postDeleteNamespaceCalled;
+ }
+
+ @Override
+ public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ NamespaceDescriptor ns) throws IOException {
+ if (bypass) {
+ env.bypass();
+ }
+ preModifyNamespaceCalled = true;
+ }
+
+ @Override
+ public void postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> env,
+ NamespaceDescriptor ns) throws IOException {
+ postModifyNamespaceCalled = true;
+ }
+
+ public boolean wasModifyNamespaceCalled() {
+ return preModifyNamespaceCalled && postModifyNamespaceCalled;
+ }
+
+ public boolean preModifyNamespaceCalledOnly() {
+ return preModifyNamespaceCalled && !postModifyNamespaceCalled;
+ }
+
+ @Override
public void preAddColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HColumnDescriptor column) throws IOException {
+ TableName tableName, HColumnDescriptor column) throws IOException {
if (bypass) {
env.bypass();
}else{
@@ -275,7 +356,7 @@ public class TestMasterObserver {
@Override
public void postAddColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HColumnDescriptor column) throws IOException {
+ TableName tableName, HColumnDescriptor column) throws IOException {
postAddColumnCalled = true;
}
@@ -289,7 +370,7 @@ public class TestMasterObserver {
@Override
public void preModifyColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HColumnDescriptor descriptor) throws IOException {
+ TableName tableName, HColumnDescriptor descriptor) throws IOException {
if (bypass) {
env.bypass();
}
@@ -298,7 +379,7 @@ public class TestMasterObserver {
@Override
public void postModifyColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, HColumnDescriptor descriptor) throws IOException {
+ TableName tableName, HColumnDescriptor descriptor) throws IOException {
postModifyColumnCalled = true;
}
@@ -312,7 +393,7 @@ public class TestMasterObserver {
@Override
public void preDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, byte[] c) throws IOException {
+ TableName tableName, byte[] c) throws IOException {
if (bypass) {
env.bypass();
}
@@ -321,7 +402,7 @@ public class TestMasterObserver {
@Override
public void postDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName, byte[] c) throws IOException {
+ TableName tableName, byte[] c) throws IOException {
postDeleteColumnCalled = true;
}
@@ -335,7 +416,7 @@ public class TestMasterObserver {
@Override
public void preEnableTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
if (bypass) {
env.bypass();
}
@@ -344,7 +425,7 @@ public class TestMasterObserver {
@Override
public void postEnableTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
postEnableTableCalled = true;
}
@@ -358,7 +439,7 @@ public class TestMasterObserver {
@Override
public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
if (bypass) {
env.bypass();
}
@@ -367,7 +448,7 @@ public class TestMasterObserver {
@Override
public void postDisableTable(ObserverContext<MasterCoprocessorEnvironment> env,
- byte[] tableName) throws IOException {
+ TableName tableName) throws IOException {
postDisableTableCalled = true;
}
@@ -654,7 +735,7 @@ public class TestMasterObserver {
@Override
public void preDeleteTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName)
throws IOException {
if (bypass) {
env.bypass();
@@ -664,7 +745,7 @@ public class TestMasterObserver {
@Override
public void postDeleteTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName)
throws IOException {
postDeleteTableHandlerCalled = true;
}
@@ -678,7 +759,7 @@ public class TestMasterObserver {
}
@Override
public void preModifyTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName,
HTableDescriptor htd) throws IOException {
if (bypass) {
env.bypass();
@@ -688,7 +769,7 @@ public class TestMasterObserver {
@Override
public void postModifyTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName,
HTableDescriptor htd) throws IOException {
postModifyTableHandlerCalled = true;
}
@@ -703,7 +784,7 @@ public class TestMasterObserver {
@Override
public void preAddColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName,
HColumnDescriptor column) throws IOException {
if (bypass) {
env.bypass();
@@ -713,7 +794,7 @@ public class TestMasterObserver {
@Override
public void postAddColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName,
HColumnDescriptor column) throws IOException {
postAddColumnHandlerCalled = true;
}
@@ -727,7 +808,7 @@ public class TestMasterObserver {
@Override
public void preModifyColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName,
HColumnDescriptor descriptor) throws IOException {
if (bypass) {
env.bypass();
@@ -737,7 +818,7 @@ public class TestMasterObserver {
@Override
public void postModifyColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName,
HColumnDescriptor descriptor) throws IOException {
postModifyColumnHandlerCalled = true;
}
@@ -751,7 +832,7 @@ public class TestMasterObserver {
}
@Override
public void preDeleteColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName,
byte[] c) throws IOException {
if (bypass) {
env.bypass();
@@ -761,7 +842,7 @@ public class TestMasterObserver {
@Override
public void postDeleteColumnHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName,
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName,
byte[] c) throws IOException {
postDeleteColumnHandlerCalled = true;
}
@@ -776,7 +857,7 @@ public class TestMasterObserver {
@Override
public void preEnableTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName)
throws IOException {
if (bypass) {
env.bypass();
@@ -786,7 +867,7 @@ public class TestMasterObserver {
@Override
public void postEnableTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName)
throws IOException {
postEnableTableHandlerCalled = true;
}
@@ -801,7 +882,7 @@ public class TestMasterObserver {
@Override
public void preDisableTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> env, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> env, TableName tableName)
throws IOException {
if (bypass) {
env.bypass();
@@ -811,7 +892,7 @@ public class TestMasterObserver {
@Override
public void postDisableTableHandler(
- ObserverContext<MasterCoprocessorEnvironment> ctx, byte[] tableName)
+ ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName)
throws IOException {
postDisableTableHandlerCalled = true;
}
@@ -826,7 +907,8 @@ public class TestMasterObserver {
@Override
public void preGetTableDescriptors(ObserverContext<MasterCoprocessorEnvironment> ctx,
- List<String> tableNamesList, List<HTableDescriptor> descriptors) throws IOException {
+ List<TableName> tableNamesList, List<HTableDescriptor> descriptors)
+ throws IOException {
preGetTableDescriptorsCalled = true;
}
@@ -843,7 +925,8 @@ public class TestMasterObserver {
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static byte[] TEST_SNAPSHOT = Bytes.toBytes("observed_snapshot");
- private static byte[] TEST_TABLE = Bytes.toBytes("observed_table");
+ private static TableName TEST_TABLE =
+ TableName.valueOf("observed_table");
private static byte[] TEST_CLONE = Bytes.toBytes("observed_clone");
private static byte[] TEST_FAMILY = Bytes.toBytes("fam1");
private static byte[] TEST_FAMILY2 = Bytes.toBytes("fam2");
@@ -1103,12 +1186,64 @@ public class TestMasterObserver {
}
}
- private void modifyTableSync(HBaseAdmin admin, byte[] tableName, HTableDescriptor htd)
+ @Test
+ public void testNamespaceOperations() throws Exception {
+ MiniHBaseCluster cluster = UTIL.getHBaseCluster();
+ String testNamespace = "observed_ns";
+ HMaster master = cluster.getMaster();
+ MasterCoprocessorHost host = master.getCoprocessorHost();
+ CPMasterObserver cp = (CPMasterObserver)host.findCoprocessor(
+ CPMasterObserver.class.getName());
+
+ cp.enableBypass(false);
+ cp.resetStates();
+
+
+ // create a table
+ HBaseAdmin admin = UTIL.getHBaseAdmin();
+ admin.createNamespace(NamespaceDescriptor.create(testNamespace).build());
+ assertTrue("Test namespace should be created", cp.wasCreateNamespaceCalled());
+
+ assertNotNull(admin.getNamespaceDescriptor(testNamespace));
+
+ // turn off bypass, run the tests again
+ cp.enableBypass(true);
+ cp.resetStates();
+
+ admin.modifyNamespace(NamespaceDescriptor.create(testNamespace).build());
+ assertTrue("Test namespace should not have been modified",
+ cp.preModifyNamespaceCalledOnly());
+
+ assertNotNull(admin.getNamespaceDescriptor(testNamespace));
+
+ admin.deleteNamespace(testNamespace);
+ assertTrue("Test namespace should not have been deleted", cp.preDeleteNamespaceCalledOnly());
+
+ assertNotNull(admin.getNamespaceDescriptor(testNamespace));
+
+ cp.enableBypass(false);
+ cp.resetStates();
+
+ // delete table
+ admin.modifyNamespace(NamespaceDescriptor.create(testNamespace).build());
+ assertTrue("Test namespace should have been modified", cp.wasModifyNamespaceCalled());
+
+ admin.deleteNamespace(testNamespace);
+ assertTrue("Test namespace should have been deleted", cp.wasDeleteNamespaceCalled());
+
+ cp.enableBypass(true);
+ cp.resetStates();
+
+ admin.createNamespace(NamespaceDescriptor.create(testNamespace).build());
+ assertTrue("Test namespace should not be created", cp.preCreateNamespaceCalledOnly());
+ }
+
+ private void modifyTableSync(HBaseAdmin admin, TableName tableName, HTableDescriptor htd)
throws IOException {
admin.modifyTable(tableName, htd);
//wait until modify table finishes
for (int t = 0; t < 100; t++) { //10 sec timeout
- HTableDescriptor td = admin.getTableDescriptor(htd.getName());
+ HTableDescriptor td = admin.getTableDescriptor(htd.getTableName());
if (td.equals(htd)) {
break;
}
@@ -1228,7 +1363,7 @@ public class TestMasterObserver {
cp.resetStates();
GetTableDescriptorsRequest req =
- RequestConverter.buildGetTableDescriptorsRequest((List<String>)null);
+ RequestConverter.buildGetTableDescriptorsRequest((List<TableName>)null);
master.getTableDescriptors(null, req);
assertTrue("Coprocessor should be called on table descriptors request",
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java Thu Aug 8 06:08:23 2013
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTru
import java.io.IOException;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -35,11 +36,7 @@ import org.apache.hadoop.hbase.client.Pu
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
-import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -50,7 +47,8 @@ import org.junit.experimental.categories
@Category(MediumTests.class)
public class TestOpenTableInCoprocessor {
- private static final byte[] otherTable = Bytes.toBytes("otherTable");
+ private static final TableName otherTable =
+ TableName.valueOf("otherTable");
private static final byte[] family = new byte[] { 'f' };
private static boolean completed = false;
@@ -83,7 +81,7 @@ public class TestOpenTableInCoprocessor
@Test
public void testCoprocessorCanCreateConnectionToRemoteTable() throws Throwable {
- HTableDescriptor primary = new HTableDescriptor("primary");
+ HTableDescriptor primary = new HTableDescriptor(TableName.valueOf("primary"));
primary.addFamily(new HColumnDescriptor(family));
// add our coprocessor
primary.addCoprocessor(SendToOtherTableCoprocessor.class.getName());
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java Thu Aug 8 06:08:23 2013
@@ -27,7 +27,6 @@ import static org.junit.Assert.assertTru
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -36,6 +35,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -76,7 +76,8 @@ import org.junit.experimental.categories
public class TestRegionObserverInterface {
static final Log LOG = LogFactory.getLog(TestRegionObserverInterface.class);
- public static final byte[] TEST_TABLE = Bytes.toBytes("TestTable");
+ public static final TableName TEST_TABLE =
+ TableName.valueOf("TestTable");
public final static byte[] A = Bytes.toBytes("a");
public final static byte[] B = Bytes.toBytes("b");
public final static byte[] C = Bytes.toBytes("c");
@@ -103,7 +104,7 @@ public class TestRegionObserverInterface
@Test
public void testRegionObserver() throws IOException {
- byte[] tableName = TEST_TABLE;
+ TableName tableName = TEST_TABLE;
// recreate table every time in order to reset the status of the
// coprocessor.
HTable table = util.createTable(tableName, new byte[][] {A, B, C});
@@ -167,7 +168,7 @@ public class TestRegionObserverInterface
@Test
public void testRowMutation() throws IOException {
- byte[] tableName = TEST_TABLE;
+ TableName tableName = TEST_TABLE;
HTable table = util.createTable(tableName, new byte[][] {A, B, C});
verifyMethodResult(SimpleRegionObserver.class,
new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut",
@@ -202,7 +203,7 @@ public class TestRegionObserverInterface
@Test
public void testIncrementHook() throws IOException {
- byte[] tableName = TEST_TABLE;
+ TableName tableName = TEST_TABLE;
HTable table = util.createTable(tableName, new byte[][] {A, B, C});
Increment inc = new Increment(Bytes.toBytes(0));
@@ -228,7 +229,8 @@ public class TestRegionObserverInterface
@Test
// HBase-3583
public void testHBase3583() throws IOException {
- byte[] tableName = Bytes.toBytes("testHBase3583");
+ TableName tableName =
+ TableName.valueOf("testHBase3583");
util.createTable(tableName, new byte[][] {A, B, C});
verifyMethodResult(SimpleRegionObserver.class,
@@ -278,7 +280,8 @@ public class TestRegionObserverInterface
@Test
// HBase-3758
public void testHBase3758() throws IOException {
- byte[] tableName = Bytes.toBytes("testHBase3758");
+ TableName tableName =
+ TableName.valueOf("testHBase3758");
util.createTable(tableName, new byte[][] {A, B, C});
verifyMethodResult(SimpleRegionObserver.class,
@@ -389,7 +392,7 @@ public class TestRegionObserverInterface
admin.deleteTable(compactTable);
}
- HTableDescriptor htd = new HTableDescriptor(compactTable);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(compactTable));
htd.addFamily(new HColumnDescriptor(A));
htd.addCoprocessor(EvenOnlyCompactor.class.getName());
admin.createTable(htd);
@@ -454,7 +457,7 @@ public class TestRegionObserverInterface
@Test
public void bulkLoadHFileTest() throws Exception {
String testName = TestRegionObserverInterface.class.getName()+".bulkLoadHFileTest";
- byte[] tableName = TEST_TABLE;
+ TableName tableName = TEST_TABLE;
Configuration conf = util.getConfiguration();
HTable table = util.createTable(tableName, new byte[][] {A, B, C});
@@ -483,12 +486,12 @@ public class TestRegionObserverInterface
}
// check each region whether the coprocessor upcalls are called or not.
- private void verifyMethodResult(Class c, String methodName[], byte[] tableName,
+ private void verifyMethodResult(Class c, String methodName[], TableName tableName,
Object value[]) throws IOException {
try {
for (JVMClusterUtil.RegionServerThread t : cluster.getRegionServerThreads()) {
for (HRegionInfo r : ProtobufUtil.getOnlineRegions(t.getRegionServer())) {
- if (!Arrays.equals(r.getTableName(), tableName)) {
+ if (!r.getTableName().equals(tableName)) {
continue;
}
RegionCoprocessorHost cph = t.getRegionServer().getOnlineRegion(r.getRegionName()).
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java Thu Aug 8 06:08:23 2013
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.HTableDes
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
@@ -140,11 +141,11 @@ public class TestRegionObserverScannerOp
HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf,
byte[]... families) throws IOException {
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
for (byte[] family : families) {
htd.addFamily(new HColumnDescriptor(family));
}
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
Path path = new Path(DIR + callingMethod);
HRegion r = HRegion.createHRegion(info, path, conf, htd);
// this following piece is a hack. currently a coprocessorHost
@@ -220,7 +221,7 @@ public class TestRegionObserverScannerOp
String tableName = "testRegionObserverCompactionTimeStacking";
byte[] ROW = Bytes.toBytes("testRow");
byte[] A = Bytes.toBytes("A");
- HTableDescriptor desc = new HTableDescriptor(tableName);
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
desc.addFamily(new HColumnDescriptor(A));
desc.addCoprocessor(EmptyRegionObsever.class.getName(), null, Coprocessor.PRIORITY_USER, null);
desc.addCoprocessor(NoDataFromCompaction.class.getName(), null, Coprocessor.PRIORITY_HIGHEST,
@@ -229,7 +230,7 @@ public class TestRegionObserverScannerOp
HBaseAdmin admin = UTIL.getHBaseAdmin();
admin.createTable(desc);
- HTable table = new HTable(conf, desc.getName());
+ HTable table = new HTable(conf, desc.getTableName());
// put a row and flush it to disk
Put put = new Put(ROW);
@@ -237,8 +238,8 @@ public class TestRegionObserverScannerOp
table.put(put);
table.flushCommits();
- HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getName());
- List<HRegion> regions = rs.getOnlineRegions(desc.getName());
+ HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getTableName());
+ List<HRegion> regions = rs.getOnlineRegions(desc.getTableName());
assertEquals("More than 1 region serving test table with 1 row", 1, regions.size());
HRegion region = regions.get(0);
admin.flush(region.getRegionName());
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java Thu Aug 8 06:08:23 2013
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -94,11 +95,11 @@ public class TestRegionObserverStacking
HRegion initHRegion (byte [] tableName, String callingMethod,
Configuration conf, byte [] ... families) throws IOException {
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
for(byte [] family : families) {
htd.addFamily(new HColumnDescriptor(family));
}
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
Path path = new Path(DIR + callingMethod);
HRegion r = HRegion.createHRegion(info, path, conf, htd);
// this following piece is a hack. currently a coprocessorHost
@@ -112,7 +113,7 @@ public class TestRegionObserverStacking
public void testRegionObserverStacking() throws Exception {
byte[] ROW = Bytes.toBytes("testRow");
- byte[] TABLE = Bytes.toBytes(getClass().getName());
+ byte[] TABLE = Bytes.toBytes(this.getClass().getSimpleName());
byte[] A = Bytes.toBytes("A");
byte[][] FAMILIES = new byte[][] { A } ;
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java Thu Aug 8 06:08:23 2013
@@ -49,7 +49,8 @@ import static org.junit.Assert.*;
public class TestRegionServerCoprocessorExceptionWithAbort {
static final Log LOG = LogFactory.getLog(TestRegionServerCoprocessorExceptionWithAbort.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final String TABLE_NAME = "observed_table";
+ private static final TableName TABLE_NAME =
+ TableName.valueOf("observed_table");
@BeforeClass
public static void setupBeforeClass() throws Exception {
@@ -72,7 +73,7 @@ public class TestRegionServerCoprocessor
// When we try to write to TEST_TABLE, the buggy coprocessor will
// cause a NullPointerException, which will cause the regionserver (which
// hosts the region we attempted to write to) to abort.
- byte[] TEST_TABLE = Bytes.toBytes(TABLE_NAME);
+ TableName TEST_TABLE = TABLE_NAME;
byte[] TEST_FAMILY = Bytes.toBytes("aaa");
HTable table = TEST_UTIL.createTable(TEST_TABLE, TEST_FAMILY);
@@ -102,7 +103,8 @@ public class TestRegionServerCoprocessor
public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
final Put put, final WALEdit edit,
final Durability durability) {
- String tableName = c.getEnvironment().getRegion().getRegionInfo().getTableNameAsString();
+ TableName tableName =
+ c.getEnvironment().getRegion().getRegionInfo().getTableName();
if (TABLE_NAME.equals(tableName)) {
throw new NullPointerException("Buggy coprocessor");
}
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java Thu Aug 8 06:08:23 2013
@@ -53,7 +53,8 @@ public class TestRegionServerCoprocessor
final Put put, final WALEdit edit,
final Durability durability) {
String tableName =
- c.getEnvironment().getRegion().getRegionInfo().getTableNameAsString();
+ c.getEnvironment().getRegion().getRegionInfo()
+ .getTableName().getNameAsString();
if (tableName.equals("observed_table")) {
Integer i = null;
i = i + 1;
@@ -88,7 +89,8 @@ public class TestRegionServerCoprocessor
// execute, which will set the rsZKNodeDeleted flag to true, which will
// pass this test.
- byte[] TEST_TABLE = Bytes.toBytes("observed_table");
+ TableName TEST_TABLE =
+ TableName.valueOf("observed_table");
byte[] TEST_FAMILY = Bytes.toBytes("aaa");
HTable table = TEST_UTIL.createTable(TEST_TABLE, TEST_FAMILY);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java Thu Aug 8 06:08:23 2013
@@ -220,16 +220,16 @@ public class TestWALObserver {
public void testWALCoprocessorReplay() throws Exception {
// WAL replay is handled at HRegion::replayRecoveredEdits(), which is
// ultimately called by HRegion::initialize()
- byte[] tableName = Bytes.toBytes("testWALCoprocessorReplay");
- final HTableDescriptor htd = getBasic3FamilyHTableDescriptor(Bytes
- .toString(tableName));
+ TableName tableName = TableName.valueOf("testWALCoprocessorReplay");
+ final HTableDescriptor htd = getBasic3FamilyHTableDescriptor(tableName);
// final HRegionInfo hri =
// createBasic3FamilyHRegionInfo(Bytes.toString(tableName));
// final HRegionInfo hri1 =
// createBasic3FamilyHRegionInfo(Bytes.toString(tableName));
final HRegionInfo hri = new HRegionInfo(tableName, null, null);
- final Path basedir = new Path(this.hbaseRootDir, Bytes.toString(tableName));
+ final Path basedir =
+ FSUtils.getTableDir(this.hbaseRootDir, tableName);
deleteDir(basedir);
fs.mkdirs(new Path(basedir, hri.getEncodedName()));
@@ -306,13 +306,13 @@ public class TestWALObserver {
* @param tableName Name of table to use when we create HTableDescriptor.
*/
private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) {
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
for (int i = 0; i < TEST_FAMILY.length; i++) {
HColumnDescriptor a = new HColumnDescriptor(TEST_FAMILY[i]);
htd.addFamily(a);
}
- return new HRegionInfo(htd.getName(), null, null, false);
+ return new HRegionInfo(htd.getTableName(), null, null, false);
}
/*
@@ -367,7 +367,7 @@ public class TestWALObserver {
return HLogFactory.createHLog(FileSystem.get(c), hbaseRootDir, logName, c);
}
- private void addWALEdits(final byte[] tableName, final HRegionInfo hri,
+ private void addWALEdits(final TableName tableName, final HRegionInfo hri,
final byte[] rowName, final byte[] family, final int count,
EnvironmentEdge ee, final HLog wal, final HTableDescriptor htd)
throws IOException {
@@ -383,7 +383,7 @@ public class TestWALObserver {
}
private HTableDescriptor getBasic3FamilyHTableDescriptor(
- final String tableName) {
+ final TableName tableName) {
HTableDescriptor htd = new HTableDescriptor(tableName);
for (int i = 0; i < TEST_FAMILY.length; i++) {
@@ -394,7 +394,7 @@ public class TestWALObserver {
}
private HTableDescriptor createBasic3FamilyHTD(final String tableName) {
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
htd.addFamily(a);
HColumnDescriptor b = new HColumnDescriptor(Bytes.toBytes("b"));
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java Thu Aug 8 06:08:23 2013
@@ -46,9 +46,9 @@ public class TestColumnPrefixFilter {
@Test
public void testColumnPrefixFilter() throws IOException {
String family = "Family";
- HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter"));
htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3));
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion region = HRegion.createHRegion(info, TEST_UTIL.
getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
try {
@@ -108,9 +108,9 @@ public class TestColumnPrefixFilter {
@Test
public void testColumnPrefixFilterWithFilterList() throws IOException {
String family = "Family";
- HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter"));
htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3));
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion region = HRegion.createHRegion(info, TEST_UTIL.
getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
try {
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java Thu Aug 8 06:08:23 2013
@@ -71,14 +71,14 @@ public class TestDependentColumnFilter {
public void setUp() throws Exception {
testVals = makeTestVals();
- HTableDescriptor htd = new HTableDescriptor(this.getClass().getName());
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(this.getClass().getSimpleName()));
HColumnDescriptor hcd0 = new HColumnDescriptor(FAMILIES[0]);
hcd0.setMaxVersions(3);
htd.addFamily(hcd0);
HColumnDescriptor hcd1 = new HColumnDescriptor(FAMILIES[1]);
hcd1.setMaxVersions(3);
htd.addFamily(hcd1);
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
TEST_UTIL.getConfiguration(), htd);
addData();
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java Thu Aug 8 06:08:23 2013
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
@@ -129,7 +130,7 @@ public class TestFilter {
@Before
public void setUp() throws Exception {
- HTableDescriptor htd = new HTableDescriptor("TestFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestFilter"));
htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
htd.addFamily(new HColumnDescriptor(FAMILIES_1[0]));
@@ -137,7 +138,7 @@ public class TestFilter {
htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[0]));
htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[1]));
htd.addFamily(new HColumnDescriptor(FAMILIES_1[1]));
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
TEST_UTIL.getConfiguration(), htd);
@@ -1770,9 +1771,9 @@ public class TestFilter {
public void testNestedFilterListWithSCVF() throws IOException {
byte[] columnStatus = Bytes.toBytes("S");
- HTableDescriptor htd = new HTableDescriptor("testNestedFilterListWithSCVF");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testNestedFilterListWithSCVF"));
htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion testRegion = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
TEST_UTIL.getConfiguration(), htd);
for(int i=0; i<10; i++) {
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java Thu Aug 8 06:08:23 2013
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.ipc.RpcClient;
@@ -135,7 +136,7 @@ public class TestFilterWithScanLimits {
assertNotNull("HBaseAdmin is not initialized successfully.", admin);
if (admin != null) {
- HTableDescriptor desc = new HTableDescriptor(name);
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name));
HColumnDescriptor coldef = new HColumnDescriptor(Bytes.toBytes("f1"));
desc.addFamily(coldef);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java Thu Aug 8 06:08:23 2013
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -140,7 +141,7 @@ public class TestFilterWrapper {
assertNotNull("HBaseAdmin is not initialized successfully.", admin);
if (admin != null) {
- HTableDescriptor desc = new HTableDescriptor(name);
+ HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name));
HColumnDescriptor coldef = new HColumnDescriptor(Bytes.toBytes("f1"));
desc.addFamily(coldef);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java Thu Aug 8 06:08:23 2013
@@ -46,12 +46,12 @@ public class TestMultipleColumnPrefixFil
@Test
public void testMultipleColumnPrefixFilter() throws IOException {
String family = "Family";
- HTableDescriptor htd = new HTableDescriptor("TestMultipleColumnPrefixFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter"));
HColumnDescriptor hcd = new HColumnDescriptor(family);
hcd.setMaxVersions(3);
htd.addFamily(hcd);
// HRegionInfo info = new HRegionInfo(htd, null, null, false);
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion region = HRegion.createHRegion(info, TEST_UTIL.
getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
@@ -110,14 +110,14 @@ public class TestMultipleColumnPrefixFil
public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException {
String family1 = "Family1";
String family2 = "Family2";
- HTableDescriptor htd = new HTableDescriptor("TestMultipleColumnPrefixFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter"));
HColumnDescriptor hcd1 = new HColumnDescriptor(family1);
hcd1.setMaxVersions(3);
htd.addFamily(hcd1);
HColumnDescriptor hcd2 = new HColumnDescriptor(family2);
hcd2.setMaxVersions(3);
htd.addFamily(hcd2);
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion region = HRegion.createHRegion(info, TEST_UTIL.
getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
@@ -181,9 +181,9 @@ public class TestMultipleColumnPrefixFil
@Test
public void testMultipleColumnPrefixFilterWithColumnPrefixFilter() throws IOException {
String family = "Family";
- HTableDescriptor htd = new HTableDescriptor("TestMultipleColumnPrefixFilter");
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter"));
htd.addFamily(new HColumnDescriptor(family));
- HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
HRegion region = HRegion.createHRegion(info, TEST_UTIL.
getDataTestDir(), TEST_UTIL.getConfiguration(),htd);
Added: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java?rev=1511591&view=auto
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java (added)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java Thu Aug 8 06:08:23 2013
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.io;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.HFileArchiveUtil;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test HFileLink name validation and parsing, and the creation,
+ * parsing and resolution of HFileLink back references.
+ */
+@Category(SmallTests.class)
+public class TestHFileLink {
+
+ @Test
+ public void testValidLinkNames() {
+ String validLinkNames[] = {"foo=fefefe-0123456", "ns=foo=abababa-fefefefe"};
+
+ for(String name : validLinkNames) {
+ Assert.assertTrue("Failed validating:" + name, name.matches(HFileLink.LINK_NAME_REGEX));
+ }
+
+ for(String name : validLinkNames) {
+ Assert.assertTrue("Failed validating:" + name, HFileLink.isHFileLink(name));
+ }
+
+ String testName = "foo=fefefe-0123456";
+ Assert.assertEquals(TableName.valueOf("foo"),
+ HFileLink.getReferencedTableName(testName));
+ Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
+ Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
+ Assert.assertEquals(testName,
+ HFileLink.createHFileLinkName(TableName.valueOf("foo"), "fefefe", "0123456"));
+
+ testName = "ns=foo=fefefe-0123456";
+ Assert.assertEquals(TableName.valueOf("ns", "foo"),
+ HFileLink.getReferencedTableName(testName));
+ Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
+ Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
+ Assert.assertEquals(testName,
+ HFileLink.createHFileLinkName(TableName.valueOf("ns", "foo"), "fefefe", "0123456"));
+
+ for(String name : validLinkNames) {
+ Matcher m = HFileLink.LINK_NAME_PATTERN.matcher(name);
+ assertTrue(m.matches());
+ Assert.assertEquals(HFileLink.getReferencedTableName(name),
+ TableName.valueOf(m.group(1), m.group(2)));
+ Assert.assertEquals(HFileLink.getReferencedRegionName(name),
+ m.group(3));
+ Assert.assertEquals(HFileLink.getReferencedHFileName(name),
+ m.group(4));
+ }
+ }
+
+ @Test
+ public void testBackReference() {
+ Path rootDir = new Path("/root");
+ Path archiveDir = new Path(rootDir, ".archive");
+ String storeFileName = "121212";
+ String linkDir = FileLink.BACK_REFERENCES_DIRECTORY_PREFIX + storeFileName;
+ String encodedRegion = "FEFE";
+ String cf = "cf1";
+
+ TableName refTables[] = {TableName.valueOf("refTable"),
+ TableName.valueOf("ns", "refTable")};
+
+ for(TableName refTable : refTables) {
+ Path refTableDir = FSUtils.getTableDir(archiveDir, refTable);
+ Path refRegionDir = HRegion.getRegionDir(refTableDir, encodedRegion);
+ Path refDir = new Path(refRegionDir, cf);
+ Path refLinkDir = new Path(refDir, linkDir);
+ String refStoreFileName = refTable.getNameAsString().replace(
+ TableName.NAMESPACE_DELIM, '=') + "=" + encodedRegion + "-" + storeFileName;
+
+ TableName tableNames[] = {TableName.valueOf("tableName1"),
+ TableName.valueOf("ns", "tableName2")};
+
+ for( TableName tableName : tableNames) {
+ Path tableDir = FSUtils.getTableDir(rootDir, tableName);
+ Path regionDir = HRegion.getRegionDir(tableDir, encodedRegion);
+ Path cfDir = new Path(regionDir, cf);
+
+ //Verify back reference creation
+ assertEquals(encodedRegion+"."+
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='),
+ HFileLink.createBackReferenceName(tableName.getNameAsString(),
+ encodedRegion));
+
+ //verify parsing back reference
+ Pair<TableName, String> parsedRef =
+ HFileLink.parseBackReferenceName(encodedRegion+"."+
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
+ assertEquals(parsedRef.getFirst(), tableName);
+ assertEquals(parsedRef.getSecond(), encodedRegion);
+
+ //verify resolving back reference
+ Path storeFileDir = new Path(refLinkDir, encodedRegion+"."+
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
+ Path linkPath = new Path(cfDir, refStoreFileName);
+ assertEquals(linkPath, HFileLink.getHFileFromBackReference(rootDir, storeFileDir));
+ }
+ }
+ }
+
+
+}
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java Thu Aug 8 06:08:23 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
@@ -90,7 +91,7 @@ public class TestChangingEncoding {
private void prepareTest(String testId) throws IOException {
tableName = "test_table_" + testId;
- HTableDescriptor htd = new HTableDescriptor(tableName);
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
hcd = new HColumnDescriptor(CF);
htd.addFamily(hcd);
admin.createTable(htd);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java Thu Aug 8 06:08:23 2013
@@ -92,7 +92,7 @@ public class TestLoadAndSwitchEncodeOnDi
assertAllOnLine(t);
System.err.println("\nCompacting the table\n");
- admin.majorCompact(TABLE);
+ admin.majorCompact(TABLE.getName());
// Wait until compaction completes
Threads.sleepWithoutInterrupt(5000);
HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java Thu Aug 8 06:08:23 2013
@@ -22,12 +22,12 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -62,7 +61,7 @@ public class TestScannerSelectionUsingTT
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
- private static String TABLE = "myTable";
+ private static TableName TABLE = TableName.valueOf("myTable");
private static String FAMILY = "myCF";
private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
@@ -106,7 +105,7 @@ public class TestScannerSelectionUsingTT
.setTimeToLive(TTL_SECONDS);
HTableDescriptor htd = new HTableDescriptor(TABLE);
htd.addFamily(hcd);
- HRegionInfo info = new HRegionInfo(Bytes.toBytes(TABLE));
+ HRegionInfo info = new HRegionInfo(TABLE);
HRegion region =
HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()),
conf, htd);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java Thu Aug 8 06:08:23 2013
@@ -31,6 +31,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Set;
import java.util.Random;
import java.util.concurrent.Callable;
@@ -45,6 +46,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -96,7 +98,8 @@ public class TestHFileOutputFormat {
private static final byte[][] FAMILIES
= { Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A"))
, Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
- private static final byte[] TABLE_NAME = Bytes.toBytes("TestTable");
+ private static final TableName TABLE_NAME =
+ TableName.valueOf("TestTable");
private HBaseTestingUtility util = new HBaseTestingUtility();
@@ -440,7 +443,7 @@ public class TestHFileOutputFormat {
LOG.info("Waiting for table to disable");
}
admin.enableTable(TABLE_NAME);
- util.waitTableAvailable(TABLE_NAME);
+ util.waitTableAvailable(TABLE_NAME.getName());
assertEquals("Data should remain after reopening of regions",
tableDigestBefore, util.checksumRows(table));
} finally {
@@ -699,7 +702,7 @@ public class TestHFileOutputFormat {
// deep inspection: get the StoreFile dir
final Path storePath = HStore.getStoreHomedir(
- HTableDescriptor.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
+ FSUtils.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
admin.getTableRegions(TABLE_NAME).get(0),
FAMILIES[0]);
assertEquals(0, fs.listStatus(storePath).length);
@@ -725,7 +728,7 @@ public class TestHFileOutputFormat {
assertEquals(2, fs.listStatus(storePath).length);
// minor compactions shouldn't get rid of the file
- admin.compact(TABLE_NAME);
+ admin.compact(TABLE_NAME.getName());
try {
quickPoll(new Callable<Boolean>() {
public Boolean call() throws Exception {
@@ -738,7 +741,7 @@ public class TestHFileOutputFormat {
}
// a major compaction should work though
- admin.majorCompact(TABLE_NAME);
+ admin.majorCompact(TABLE_NAME.getName());
quickPoll(new Callable<Boolean>() {
public Boolean call() throws Exception {
return fs.listStatus(storePath).length == 1;
@@ -767,7 +770,7 @@ public class TestHFileOutputFormat {
// deep inspection: get the StoreFile dir
final Path storePath = HStore.getStoreHomedir(
- HTableDescriptor.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
+ FSUtils.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
admin.getTableRegions(TABLE_NAME).get(0),
FAMILIES[0]);
assertEquals(0, fs.listStatus(storePath).length);
@@ -776,7 +779,7 @@ public class TestHFileOutputFormat {
Put p = new Put(Bytes.toBytes("test"));
p.add(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
table.put(p);
- admin.flush(TABLE_NAME);
+ admin.flush(TABLE_NAME.getName());
assertEquals(1, util.countRows(table));
quickPoll(new Callable<Boolean>() {
public Boolean call() throws Exception {
@@ -802,7 +805,7 @@ public class TestHFileOutputFormat {
assertEquals(2, fs.listStatus(storePath).length);
// minor compactions shouldn't get rid of the file
- admin.compact(TABLE_NAME);
+ admin.compact(TABLE_NAME.getName());
try {
quickPoll(new Callable<Boolean>() {
public Boolean call() throws Exception {
@@ -815,7 +818,7 @@ public class TestHFileOutputFormat {
}
// a major compaction should work though
- admin.majorCompact(TABLE_NAME);
+ admin.majorCompact(TABLE_NAME.getName());
quickPoll(new Callable<Boolean>() {
public Boolean call() throws Exception {
return fs.listStatus(storePath).length == 1;
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java?rev=1511591&r1=1511590&r2=1511591&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java Thu Aug 8 06:08:23 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -40,7 +41,6 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.AfterClass;
import org.junit.Before;
@@ -57,8 +57,9 @@ public class TestHLogRecordReader {
private static Configuration conf;
private static FileSystem fs;
private static Path hbaseDir;
- private static final byte [] tableName = Bytes.toBytes(getName());
- private static final byte [] rowName = tableName;
+ private static final TableName tableName =
+ TableName.valueOf(getName());
+ private static final byte [] rowName = tableName.getName();
private static final HRegionInfo info = new HRegionInfo(tableName,
Bytes.toBytes(""), Bytes.toBytes(""), false);
private static final byte [] family = Bytes.toBytes("column");