Posted to commits@hbase.apache.org by zh...@apache.org on 2018/02/10 12:37:24 UTC

[11/50] [abbrv] hbase git commit: HBASE-19964 Addendum retry on major compaction

HBASE-19964 Addendum retry on major compaction
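
The addendum replaces the previous flow -- an asynchronous Admin#majorCompact() request
followed by a Waiter.waitFor() predicate with a 30 second timeout -- with a loop that
calls HRegion#compact(true) directly and retries until the store has been compacted down
to a single store file, so the test no longer depends on the compaction finishing within
a fixed timeout.

A minimal sketch of the retry pattern introduced by the diff below, assuming the same
mini-cluster handle UTIL (HBaseTestingUtility) and the NAME/FAMILY constants used by the
test; everything outside HRegion#compact and HStore#getStorefilesCount is illustrative
context only:

    // Fetch the single region hosting the test table and its store for FAMILY.
    HRegion region = UTIL.getHBaseCluster().findRegionsForTable(NAME).get(0);
    HStore store = region.getStore(FAMILY);
    // Run a major compaction synchronously and check the store file count; retry the
    // whole compaction if more than one file remains (e.g. if the request was skipped
    // or raced by another store file change), instead of waiting on a timed predicate.
    for (;;) {
      region.compact(true);
      if (store.getStorefilesCount() == 1) {
        break;
      }
    }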


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b001b184
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b001b184
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b001b184

Branch: refs/heads/HBASE-19397-branch-2
Commit: b001b1849a0f6ab5fe9137fb32c54f6e26089661
Parents: 3f12706
Author: zhangduo <zh...@apache.org>
Authored: Sat Feb 10 17:43:08 2018 +0800
Committer: zhangduo <zh...@apache.org>
Committed: Sat Feb 10 20:08:22 2018 +0800

----------------------------------------------------------------------
 .../TestWriteHeavyIncrementObserver.java        | 27 ++++++++------------
 1 file changed, 10 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/b001b184/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java
index 7ef1558..639461b 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestWriteHeavyIncrementObserver.java
@@ -21,12 +21,12 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -59,30 +59,23 @@ public class TestWriteHeavyIncrementObserver extends WriteHeavyIncrementObserver
     // we do not hack scan operation so using scan we could get the original values added into the
     // table.
     try (ResultScanner scanner = TABLE.getScanner(new Scan().withStartRow(ROW)
-        .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
+      .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
       Result r = scanner.next();
       assertTrue(r.rawCells().length > 2);
     }
     UTIL.flush(NAME);
-    UTIL.getAdmin().majorCompact(NAME);
-    HStore store = UTIL.getHBaseCluster().findRegionsForTable(NAME).get(0).getStore(FAMILY);
-    Waiter.waitFor(UTIL.getConfiguration(), 30000, new Waiter.ExplainingPredicate<Exception>() {
-
-      @Override
-      public boolean evaluate() throws Exception {
-        return store.getStorefilesCount() == 1;
-      }
-
-      @Override
-      public String explainFailure() throws Exception {
-        return "Major compaction hangs, there are still " + store.getStorefilesCount() +
-            " store files";
+    HRegion region = UTIL.getHBaseCluster().findRegionsForTable(NAME).get(0);
+    HStore store = region.getStore(FAMILY);
+    for (;;) {
+      region.compact(true);
+      if (store.getStorefilesCount() == 1) {
+        break;
       }
-    });
+    }
     assertSum();
     // Should only have two cells after flush and major compaction
     try (ResultScanner scanner = TABLE.getScanner(new Scan().withStartRow(ROW)
-        .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
+      .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
       Result r = scanner.next();
       assertEquals(2, r.rawCells().length);
     }