You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ap...@apache.org on 2017/11/28 18:40:08 UTC
[1/2] hbase git commit: HBASE-19348 Fix error-prone errors for
branch-1
Repository: hbase
Updated Branches:
refs/heads/branch-1 f7f425e53 -> 24d82195c
refs/heads/branch-1.4 36fae801a -> 6aaa9dcfd
HBASE-19348 Fix error-prone errors for branch-1
Signed-off-by: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6aaa9dcf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6aaa9dcf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6aaa9dcf
Branch: refs/heads/branch-1.4
Commit: 6aaa9dcfd6a349b9c5315309ec7d38849bc3e124
Parents: 36fae80
Author: Chia-Ping Tsai <ch...@gmail.com>
Authored: Tue Nov 28 03:46:14 2017 +0800
Committer: Andrew Purtell <ap...@apache.org>
Committed: Mon Nov 27 18:29:02 2017 -0800
----------------------------------------------------------------------
...MasterAnnotationReadingPriorityFunction.java | 10 +-
.../master/procedure/ServerCrashProcedure.java | 4 +-
.../master/snapshot/CloneSnapshotHandler.java | 1 -
.../org/apache/hadoop/hbase/TestCompare.java | 1 +
.../hadoop/hbase/TestHRegionLocation.java | 1 +
.../hbase/TestIPv6NIOServerSocketChannel.java | 2 +-
.../org/apache/hadoop/hbase/TestZooKeeper.java | 4 +-
.../hadoop/hbase/client/TestMetaScanner.java | 1 +
.../client/TestScannersFromClientSide2.java | 1 +
.../coprocessor/TestCoprocessorInterface.java | 4 +-
.../apache/hadoop/hbase/filter/TestFilter.java | 1 +
.../TestMasterOperationsForRegionReplicas.java | 3 +-
.../procedure/TestMasterProcedureScheduler.java | 2 +
.../procedure/TestWALProcedureStoreOnHDFS.java | 141 +++++++++----------
14 files changed, 85 insertions(+), 91 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
index 1e6dade..dc5d824 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
@@ -70,12 +70,10 @@ public class MasterAnnotationReadingPriorityFunction extends AnnotationReadingPr
RegionServerStatusProtos.ReportRegionStateTransitionRequest
tRequest = (RegionServerStatusProtos.ReportRegionStateTransitionRequest) param;
for (RegionServerStatusProtos.RegionStateTransition rst : tRequest.getTransitionList()) {
- if (rst.getRegionInfoList() != null) {
- for (HBaseProtos.RegionInfo info : rst.getRegionInfoList()) {
- TableName tn = ProtobufUtil.toTableName(info.getTableName());
- if (tn.isSystemTable()) {
- return HConstants.SYSTEMTABLE_QOS;
- }
+ for (HBaseProtos.RegionInfo info : rst.getRegionInfoList()) {
+ TableName tn = ProtobufUtil.toTableName(info.getTableName());
+ if (tn.isSystemTable()) {
+ return HConstants.SYSTEMTABLE_QOS;
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 3463000..1fbc428 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -183,8 +183,8 @@ implements ServerProcedureInterface {
LOG.trace(state);
}
// Keep running count of cycles
- if (state.ordinal() != this.previousState) {
- this.previousState = state.ordinal();
+ if (state.getNumber() != this.previousState) {
+ this.previousState = state.getNumber();
this.cycles = 0;
} else {
this.cycles++;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
index 6f8bcd4..f4a6c95 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
@@ -123,7 +123,6 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot
// Clone acl of snapshot into newly created table.
if (restoreAcl && snapshot.hasUsersAndPermissions()
- && snapshot.getUsersAndPermissions() != null
&& SnapshotDescriptionUtils.isSecurityAvailable(conf)) {
RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, conf);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
index 4b42028..a69df34 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
@@ -33,6 +33,7 @@ public class TestCompare extends TestCase {
/**
* Sort of HRegionInfo.
*/
+ @SuppressWarnings({"SelfComparison"})
public void testHRegionInfo() {
HRegionInfo a = new HRegionInfo(TableName.valueOf("a"), null, null);
HRegionInfo b = new HRegionInfo(TableName.valueOf("b"), null, null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
index f6488d0..3d6ffb9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
@@ -62,6 +62,7 @@ public class TestHRegionLocation {
System.out.println(hrl1.toString());
}
+ @SuppressWarnings("SelfComparison")
@Test
public void testCompareTo() {
ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index 3dc2871..6fd2a0e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -123,7 +123,7 @@ public class TestIPv6NIOServerSocketChannel {
//On Windows JDK6, we will get expected exception:
//java.net.SocketException: Address family not supported by protocol family
//or java.net.SocketException: Protocol family not supported
- Assert.assertFalse(ex.getClass().isInstance(BindException.class));
+ Assert.assertFalse(ex instanceof BindException);
Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
LOG.info("Received expected exception:");
LOG.info(ex);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index 3441aa6..9070033 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -69,6 +69,7 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -141,7 +142,8 @@ public class TestZooKeeper {
* @throws InterruptedException
*/
// fails frequently, disabled for now, see HBASE-6406
- //@Test
+ @Ignore
+ @Test
public void testClientSessionExpired() throws Exception {
Configuration c = new Configuration(TEST_UTIL.getConfiguration());
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
index bff9c78..a91560e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
@@ -54,6 +54,7 @@ public class TestMetaScanner {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Connection connection;
+ @SuppressWarnings("JUnit4SetUpNotRun")
public void setUp() throws Exception {
TEST_UTIL.startMiniCluster(1);
this.connection = TEST_UTIL.getConnection();
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
index 4da94f2..ff57ca0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
@@ -217,6 +217,7 @@ public class TestScannersFromClientSide2 {
testScan(456, false, 678, false, 200);
}
+ @Test
public void testReversedScanWithLimit() throws Exception {
testReversedScan(998, true, 1, false, 900); // from last region to first region
testReversedScan(543, true, 321, true, 100);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 32d6af4..906c103 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -272,9 +272,7 @@ public class TestCoprocessorInterface {
@Override
public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
final Get get, final List<Cell> results) throws IOException {
- if (1/0 == 1) {
- e.complete();
- }
+ throw new RuntimeException();
}
Map<String, Object> getSharedData() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index b8883bc..50532c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -2049,6 +2049,7 @@ public class TestFilter {
}
}
+ @Test
public void testNestedFilterListWithSCVF() throws IOException {
byte[] columnStatus = Bytes.toBytes("S");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testNestedFilterListWithSCVF"));
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 2112be7..75f5243 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -232,7 +233,7 @@ public class TestMasterOperationsForRegionReplicas {
}
}
- //@Test (TODO: enable when we have support for alter_table- HBASE-10361).
+ @Test
public void testIncompleteMetaTableReplicaInformation() throws Exception {
final TableName table = TableName.valueOf("fooTableTest1");
final int numRegions = 3;
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
index 2b594f4..d3d9b52 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
@@ -488,6 +488,8 @@ public class TestMasterProcedureScheduler {
case READ:
queue.releaseTableSharedLock(proc, getTableName(proc));
break;
+ default:
+ break;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6aaa9dcf/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
index 8a93151..f05b588 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
@@ -83,7 +83,9 @@ public class TestWALProcedureStoreOnHDFS {
}
};
- private static void initConfig(Configuration conf) {
+ @Before
+ public void initConfig() {
+ Configuration conf = UTIL.getConfiguration();
conf.setInt("dfs.replication", 3);
conf.setInt("dfs.namenode.replication.min", 3);
@@ -93,7 +95,8 @@ public class TestWALProcedureStoreOnHDFS {
conf.setInt(WALProcedureStore.MAX_SYNC_FAILURE_ROLL_CONF_KEY, 10);
}
- public void setup() throws Exception {
+ // No @Before because some tests need to do additional config first
+ private void setup() throws Exception {
MiniDFSCluster dfs = UTIL.startMiniDFSCluster(3);
Path logDir = new Path(new Path(dfs.getFileSystem().getUri()), "/test-logs");
@@ -103,6 +106,7 @@ public class TestWALProcedureStoreOnHDFS {
store.recoverLease();
}
+ @After
public void tearDown() throws Exception {
store.stop(false);
UTIL.getDFSCluster().getFileSystem().delete(store.getWALDir(), true);
@@ -116,102 +120,87 @@ public class TestWALProcedureStoreOnHDFS {
@Test(timeout=60000, expected=RuntimeException.class)
public void testWalAbortOnLowReplication() throws Exception {
- initConfig(UTIL.getConfiguration());
setup();
- try {
- assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+ assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
- LOG.info("Stop DataNode");
- UTIL.getDFSCluster().stopDataNode(0);
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ LOG.info("Stop DataNode");
+ UTIL.getDFSCluster().stopDataNode(0);
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- store.insert(new TestProcedure(1, -1), null);
- for (long i = 2; store.isRunning(); ++i) {
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- store.insert(new TestProcedure(i, -1), null);
- Thread.sleep(100);
- }
- assertFalse(store.isRunning());
- fail("The store.insert() should throw an exception");
- } finally {
- tearDown();
+ store.insert(new TestProcedure(1, -1), null);
+ for (long i = 2; store.isRunning(); ++i) {
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ store.insert(new TestProcedure(i, -1), null);
+ Thread.sleep(100);
+ }
+ assertFalse(store.isRunning());
+ fail("The store.insert() should throw an exception");
}
@Test(timeout=60000)
public void testWalAbortOnLowReplicationWithQueuedWriters() throws Exception {
- initConfig(UTIL.getConfiguration());
setup();
- try {
- assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
- store.registerListener(new ProcedureStore.ProcedureStoreListener() {
- @Override
- public void postSync() {
- Threads.sleepWithoutInterrupt(2000);
- }
-
- @Override
- public void abortProcess() {}
- });
-
- final AtomicInteger reCount = new AtomicInteger(0);
- Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
- for (int i = 0; i < thread.length; ++i) {
- final long procId = i + 1;
- thread[i] = new Thread() {
- public void run() {
- try {
- LOG.debug("[S] INSERT " + procId);
- store.insert(new TestProcedure(procId, -1), null);
- LOG.debug("[E] INSERT " + procId);
- } catch (RuntimeException e) {
- reCount.incrementAndGet();
- LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
- }
- }
- };
- thread[i].start();
+ assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+ store.registerListener(new ProcedureStore.ProcedureStoreListener() {
+ @Override
+ public void postSync() {
+ Threads.sleepWithoutInterrupt(2000);
}
- Thread.sleep(1000);
- LOG.info("Stop DataNode");
- UTIL.getDFSCluster().stopDataNode(0);
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ @Override
+ public void abortProcess() {}
+ });
+
+ final AtomicInteger reCount = new AtomicInteger(0);
+ Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
+ for (int i = 0; i < thread.length; ++i) {
+ final long procId = i + 1;
+ thread[i] = new Thread() {
+ public void run() {
+ try {
+ LOG.debug("[S] INSERT " + procId);
+ store.insert(new TestProcedure(procId, -1), null);
+ LOG.debug("[E] INSERT " + procId);
+ } catch (RuntimeException e) {
+ reCount.incrementAndGet();
+ LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
+ }
+ }
+ };
+ thread[i].start();
+ }
- for (int i = 0; i < thread.length; ++i) {
- thread[i].join();
- }
+ Thread.sleep(1000);
+ LOG.info("Stop DataNode");
+ UTIL.getDFSCluster().stopDataNode(0);
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- assertFalse(store.isRunning());
- assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
- reCount.get() < thread.length);
- } finally {
- tearDown();
+ for (int i = 0; i < thread.length; ++i) {
+ thread[i].join();
}
+
+ assertFalse(store.isRunning());
+ assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
+ reCount.get() < thread.length);
}
@Test(timeout=60000)
public void testWalRollOnLowReplication() throws Exception {
- initConfig(UTIL.getConfiguration());
UTIL.getConfiguration().setInt("dfs.namenode.replication.min", 1);
setup();
- try {
- int dnCount = 0;
- store.insert(new TestProcedure(1, -1), null);
- UTIL.getDFSCluster().restartDataNode(dnCount);
- for (long i = 2; i < 100; ++i) {
- store.insert(new TestProcedure(i, -1), null);
- waitForNumReplicas(3);
- Thread.sleep(100);
- if ((i % 30) == 0) {
- LOG.info("Restart Data Node");
- UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
- }
+ int dnCount = 0;
+ store.insert(new TestProcedure(1, -1), null);
+ UTIL.getDFSCluster().restartDataNode(dnCount);
+ for (long i = 2; i < 100; ++i) {
+ store.insert(new TestProcedure(i, -1), null);
+ waitForNumReplicas(3);
+ Thread.sleep(100);
+ if ((i % 30) == 0) {
+ LOG.info("Restart Data Node");
+ UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
}
- assertTrue(store.isRunning());
- } finally {
- tearDown();
}
+ assertTrue(store.isRunning());
}
public void waitForNumReplicas(int numReplicas) throws Exception {
[2/2] hbase git commit: HBASE-19348 Fix error-prone errors for
branch-1
Posted by ap...@apache.org.
HBASE-19348 Fix error-prone errors for branch-1
Signed-off-by: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24d82195
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24d82195
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24d82195
Branch: refs/heads/branch-1
Commit: 24d82195cb82bd90755408b85d70c57cf4b3ff2f
Parents: f7f425e
Author: Chia-Ping Tsai <ch...@gmail.com>
Authored: Tue Nov 28 03:46:14 2017 +0800
Committer: Andrew Purtell <ap...@apache.org>
Committed: Mon Nov 27 18:29:11 2017 -0800
----------------------------------------------------------------------
...MasterAnnotationReadingPriorityFunction.java | 10 +-
.../master/procedure/ServerCrashProcedure.java | 4 +-
.../master/snapshot/CloneSnapshotHandler.java | 1 -
.../org/apache/hadoop/hbase/TestCompare.java | 1 +
.../hadoop/hbase/TestHRegionLocation.java | 1 +
.../hbase/TestIPv6NIOServerSocketChannel.java | 2 +-
.../org/apache/hadoop/hbase/TestZooKeeper.java | 4 +-
.../hadoop/hbase/client/TestMetaScanner.java | 1 +
.../client/TestScannersFromClientSide2.java | 1 +
.../coprocessor/TestCoprocessorInterface.java | 4 +-
.../apache/hadoop/hbase/filter/TestFilter.java | 1 +
.../TestMasterOperationsForRegionReplicas.java | 3 +-
.../procedure/TestMasterProcedureScheduler.java | 2 +
.../procedure/TestWALProcedureStoreOnHDFS.java | 141 +++++++++----------
14 files changed, 85 insertions(+), 91 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
index 1e6dade..dc5d824 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
@@ -70,12 +70,10 @@ public class MasterAnnotationReadingPriorityFunction extends AnnotationReadingPr
RegionServerStatusProtos.ReportRegionStateTransitionRequest
tRequest = (RegionServerStatusProtos.ReportRegionStateTransitionRequest) param;
for (RegionServerStatusProtos.RegionStateTransition rst : tRequest.getTransitionList()) {
- if (rst.getRegionInfoList() != null) {
- for (HBaseProtos.RegionInfo info : rst.getRegionInfoList()) {
- TableName tn = ProtobufUtil.toTableName(info.getTableName());
- if (tn.isSystemTable()) {
- return HConstants.SYSTEMTABLE_QOS;
- }
+ for (HBaseProtos.RegionInfo info : rst.getRegionInfoList()) {
+ TableName tn = ProtobufUtil.toTableName(info.getTableName());
+ if (tn.isSystemTable()) {
+ return HConstants.SYSTEMTABLE_QOS;
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 3463000..1fbc428 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -183,8 +183,8 @@ implements ServerProcedureInterface {
LOG.trace(state);
}
// Keep running count of cycles
- if (state.ordinal() != this.previousState) {
- this.previousState = state.ordinal();
+ if (state.getNumber() != this.previousState) {
+ this.previousState = state.getNumber();
this.cycles = 0;
} else {
this.cycles++;
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
index 6f8bcd4..f4a6c95 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
@@ -123,7 +123,6 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot
// Clone acl of snapshot into newly created table.
if (restoreAcl && snapshot.hasUsersAndPermissions()
- && snapshot.getUsersAndPermissions() != null
&& SnapshotDescriptionUtils.isSecurityAvailable(conf)) {
RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, conf);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
index 4b42028..a69df34 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
@@ -33,6 +33,7 @@ public class TestCompare extends TestCase {
/**
* Sort of HRegionInfo.
*/
+ @SuppressWarnings({"SelfComparison"})
public void testHRegionInfo() {
HRegionInfo a = new HRegionInfo(TableName.valueOf("a"), null, null);
HRegionInfo b = new HRegionInfo(TableName.valueOf("b"), null, null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
index f6488d0..3d6ffb9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
@@ -62,6 +62,7 @@ public class TestHRegionLocation {
System.out.println(hrl1.toString());
}
+ @SuppressWarnings("SelfComparison")
@Test
public void testCompareTo() {
ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index 3dc2871..6fd2a0e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -123,7 +123,7 @@ public class TestIPv6NIOServerSocketChannel {
//On Windows JDK6, we will get expected exception:
//java.net.SocketException: Address family not supported by protocol family
//or java.net.SocketException: Protocol family not supported
- Assert.assertFalse(ex.getClass().isInstance(BindException.class));
+ Assert.assertFalse(ex instanceof BindException);
Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
LOG.info("Received expected exception:");
LOG.info(ex);
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index 3441aa6..9070033 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -69,6 +69,7 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -141,7 +142,8 @@ public class TestZooKeeper {
* @throws InterruptedException
*/
// fails frequently, disabled for now, see HBASE-6406
- //@Test
+ @Ignore
+ @Test
public void testClientSessionExpired() throws Exception {
Configuration c = new Configuration(TEST_UTIL.getConfiguration());
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
index bead7e9..bca8cf3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
@@ -54,6 +54,7 @@ public class TestMetaScanner {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Connection connection;
+ @SuppressWarnings("JUnit4SetUpNotRun")
public void setUp() throws Exception {
TEST_UTIL.startMiniCluster(1);
this.connection = TEST_UTIL.getConnection();
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
index 4da94f2..ff57ca0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
@@ -217,6 +217,7 @@ public class TestScannersFromClientSide2 {
testScan(456, false, 678, false, 200);
}
+ @Test
public void testReversedScanWithLimit() throws Exception {
testReversedScan(998, true, 1, false, 900); // from last region to first region
testReversedScan(543, true, 321, true, 100);
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 32d6af4..906c103 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -272,9 +272,7 @@ public class TestCoprocessorInterface {
@Override
public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
final Get get, final List<Cell> results) throws IOException {
- if (1/0 == 1) {
- e.complete();
- }
+ throw new RuntimeException();
}
Map<String, Object> getSharedData() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index b8883bc..50532c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -2049,6 +2049,7 @@ public class TestFilter {
}
}
+ @Test
public void testNestedFilterListWithSCVF() throws IOException {
byte[] columnStatus = Bytes.toBytes("S");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testNestedFilterListWithSCVF"));
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 2112be7..75f5243 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -232,7 +233,7 @@ public class TestMasterOperationsForRegionReplicas {
}
}
- //@Test (TODO: enable when we have support for alter_table- HBASE-10361).
+ @Test
public void testIncompleteMetaTableReplicaInformation() throws Exception {
final TableName table = TableName.valueOf("fooTableTest1");
final int numRegions = 3;
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
index 2b594f4..d3d9b52 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
@@ -488,6 +488,8 @@ public class TestMasterProcedureScheduler {
case READ:
queue.releaseTableSharedLock(proc, getTableName(proc));
break;
+ default:
+ break;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d82195/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
index 8a93151..f05b588 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
@@ -83,7 +83,9 @@ public class TestWALProcedureStoreOnHDFS {
}
};
- private static void initConfig(Configuration conf) {
+ @Before
+ public void initConfig() {
+ Configuration conf = UTIL.getConfiguration();
conf.setInt("dfs.replication", 3);
conf.setInt("dfs.namenode.replication.min", 3);
@@ -93,7 +95,8 @@ public class TestWALProcedureStoreOnHDFS {
conf.setInt(WALProcedureStore.MAX_SYNC_FAILURE_ROLL_CONF_KEY, 10);
}
- public void setup() throws Exception {
+ // No @Before because some tests need to do additional config first
+ private void setup() throws Exception {
MiniDFSCluster dfs = UTIL.startMiniDFSCluster(3);
Path logDir = new Path(new Path(dfs.getFileSystem().getUri()), "/test-logs");
@@ -103,6 +106,7 @@ public class TestWALProcedureStoreOnHDFS {
store.recoverLease();
}
+ @After
public void tearDown() throws Exception {
store.stop(false);
UTIL.getDFSCluster().getFileSystem().delete(store.getWALDir(), true);
@@ -116,102 +120,87 @@ public class TestWALProcedureStoreOnHDFS {
@Test(timeout=60000, expected=RuntimeException.class)
public void testWalAbortOnLowReplication() throws Exception {
- initConfig(UTIL.getConfiguration());
setup();
- try {
- assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+ assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
- LOG.info("Stop DataNode");
- UTIL.getDFSCluster().stopDataNode(0);
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ LOG.info("Stop DataNode");
+ UTIL.getDFSCluster().stopDataNode(0);
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- store.insert(new TestProcedure(1, -1), null);
- for (long i = 2; store.isRunning(); ++i) {
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- store.insert(new TestProcedure(i, -1), null);
- Thread.sleep(100);
- }
- assertFalse(store.isRunning());
- fail("The store.insert() should throw an exeption");
- } finally {
- tearDown();
+ store.insert(new TestProcedure(1, -1), null);
+ for (long i = 2; store.isRunning(); ++i) {
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ store.insert(new TestProcedure(i, -1), null);
+ Thread.sleep(100);
}
+ assertFalse(store.isRunning());
+ fail("The store.insert() should throw an exeption");
}
@Test(timeout=60000)
public void testWalAbortOnLowReplicationWithQueuedWriters() throws Exception {
- initConfig(UTIL.getConfiguration());
setup();
- try {
- assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
- store.registerListener(new ProcedureStore.ProcedureStoreListener() {
- @Override
- public void postSync() {
- Threads.sleepWithoutInterrupt(2000);
- }
-
- @Override
- public void abortProcess() {}
- });
-
- final AtomicInteger reCount = new AtomicInteger(0);
- Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
- for (int i = 0; i < thread.length; ++i) {
- final long procId = i + 1;
- thread[i] = new Thread() {
- public void run() {
- try {
- LOG.debug("[S] INSERT " + procId);
- store.insert(new TestProcedure(procId, -1), null);
- LOG.debug("[E] INSERT " + procId);
- } catch (RuntimeException e) {
- reCount.incrementAndGet();
- LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
- }
- }
- };
- thread[i].start();
+ assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+ store.registerListener(new ProcedureStore.ProcedureStoreListener() {
+ @Override
+ public void postSync() {
+ Threads.sleepWithoutInterrupt(2000);
}
- Thread.sleep(1000);
- LOG.info("Stop DataNode");
- UTIL.getDFSCluster().stopDataNode(0);
- assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+ @Override
+ public void abortProcess() {}
+ });
+
+ final AtomicInteger reCount = new AtomicInteger(0);
+ Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
+ for (int i = 0; i < thread.length; ++i) {
+ final long procId = i + 1;
+ thread[i] = new Thread() {
+ public void run() {
+ try {
+ LOG.debug("[S] INSERT " + procId);
+ store.insert(new TestProcedure(procId, -1), null);
+ LOG.debug("[E] INSERT " + procId);
+ } catch (RuntimeException e) {
+ reCount.incrementAndGet();
+ LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
+ }
+ }
+ };
+ thread[i].start();
+ }
- for (int i = 0; i < thread.length; ++i) {
- thread[i].join();
- }
+ Thread.sleep(1000);
+ LOG.info("Stop DataNode");
+ UTIL.getDFSCluster().stopDataNode(0);
+ assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
- assertFalse(store.isRunning());
- assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
- reCount.get() < thread.length);
- } finally {
- tearDown();
+ for (int i = 0; i < thread.length; ++i) {
+ thread[i].join();
}
+
+ assertFalse(store.isRunning());
+ assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
+ reCount.get() < thread.length);
}
@Test(timeout=60000)
public void testWalRollOnLowReplication() throws Exception {
- initConfig(UTIL.getConfiguration());
UTIL.getConfiguration().setInt("dfs.namenode.replication.min", 1);
setup();
- try {
- int dnCount = 0;
- store.insert(new TestProcedure(1, -1), null);
- UTIL.getDFSCluster().restartDataNode(dnCount);
- for (long i = 2; i < 100; ++i) {
- store.insert(new TestProcedure(i, -1), null);
- waitForNumReplicas(3);
- Thread.sleep(100);
- if ((i % 30) == 0) {
- LOG.info("Restart Data Node");
- UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
- }
+ int dnCount = 0;
+ store.insert(new TestProcedure(1, -1), null);
+ UTIL.getDFSCluster().restartDataNode(dnCount);
+ for (long i = 2; i < 100; ++i) {
+ store.insert(new TestProcedure(i, -1), null);
+ waitForNumReplicas(3);
+ Thread.sleep(100);
+ if ((i % 30) == 0) {
+ LOG.info("Restart Data Node");
+ UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
}
- assertTrue(store.isRunning());
- } finally {
- tearDown();
}
+ assertTrue(store.isRunning());
}
public void waitForNumReplicas(int numReplicas) throws Exception {