Posted to commits@hbase.apache.org by st...@apache.org on 2015/12/02 21:07:16 UTC

hbase git commit: HBASE-14541 TestHFileOutputFormat.testMRIncrementalLoadWithSplit failed due to too many splits and few retries

Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 f2ce05d8a -> 2444047d0


HBASE-14541 TestHFileOutputFormat.testMRIncrementalLoadWithSplit failed due to too many splits and few retries


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2444047d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2444047d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2444047d

Branch: refs/heads/branch-1.2
Commit: 2444047d076f64fbd8acf7684426101f5b1c0527
Parents: f2ce05d
Author: stack <st...@apache.org>
Authored: Wed Dec 2 12:06:53 2015 -0800
Committer: stack <st...@apache.org>
Committed: Wed Dec 2 12:06:53 2015 -0800

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  7 ++++++-
 .../mapreduce/TestLoadIncrementalHFiles.java    | 22 ++++++++++++++++++++
 2 files changed, 28 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/2444047d/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 91c6bc8..443cd96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -428,6 +428,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
         }
 
         int maxRetries = getConf().getInt("hbase.bulkload.retries.number", 10);
+        maxRetries = Math.max(maxRetries, startEndKeys.getFirst().length + 1);
         if (maxRetries != 0 && count >= maxRetries) {
           throw new IOException("Retry attempted " + count +
             " times without completing, bailing out");
@@ -611,7 +612,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
 
     // We use a '_' prefix which is ignored when walking directory trees
     // above.
-    final Path tmpDir = new Path(item.hfilePath.getParent(), "_tmp");
+    final String TMP_DIR = "_tmp";
+    Path tmpDir = item.hfilePath.getParent();
+    if (!tmpDir.getName().equals(TMP_DIR)) {
+      tmpDir = new Path(tmpDir, TMP_DIR);
+    }
 
     LOG.info("HFile at " + hfilePath + " no longer fits inside a single " +
     "region. Splitting...");

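----------------------------------------------------------------------
For context, a minimal standalone sketch of the two behaviours the patch above
introduces. The class and method names here (BulkLoadRetrySketch, checkRetries,
splitOutputDir) are illustrative only and are not part of the LoadIncrementalHFiles
API: the retry cap is raised to at least one more than the number of regions, so a
bulk load that must re-split once per region boundary is not aborted by the default
hbase.bulkload.retries.number of 10, and split output reuses an existing "_tmp"
directory instead of nesting a fresh "_tmp" under it on every re-split.

import java.io.IOException;

/** Illustrative sketch only; not the HBase LoadIncrementalHFiles API. */
public class BulkLoadRetrySketch {

  /**
   * Mirrors the patched retry check: the configured retry count is raised to
   * at least (regionCount + 1) so each HFile can be re-split once per region
   * boundary before the load bails out.
   */
  static void checkRetries(int configuredMaxRetries, int regionCount, int attemptCount)
      throws IOException {
    int maxRetries = Math.max(configuredMaxRetries, regionCount + 1);
    if (maxRetries != 0 && attemptCount >= maxRetries) {
      throw new IOException("Retry attempted " + attemptCount
          + " times without completing, bailing out");
    }
  }

  /**
   * Mirrors the patched temp-directory choice: if the HFile already sits in a
   * "_tmp" directory (i.e. it is itself the output of an earlier split), that
   * directory is reused rather than creating _tmp/_tmp/_tmp/... chains.
   */
  static String splitOutputDir(String hfileParentDir) {
    final String TMP_DIR = "_tmp";
    return hfileParentDir.endsWith("/" + TMP_DIR)
        ? hfileParentDir
        : hfileParentDir + "/" + TMP_DIR;
  }

  public static void main(String[] args) throws IOException {
    // With a table split into 22 regions (roughly what the new testSplitALot
    // sets up), the effective cap becomes 23 even though the configured value
    // stays at the default of 10, so attempt 15 is still allowed to proceed.
    checkRetries(10, 22, 15);
    System.out.println(splitOutputDir("/bulk/cf"));       // -> /bulk/cf/_tmp
    System.out.println(splitOutputDir("/bulk/cf/_tmp"));  // -> /bulk/cf/_tmp (reused)
  }
}
----------------------------------------------------------------------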
http://git-wip-us.apache.org/repos/asf/hbase/blob/2444047d/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 331aae8..27bac6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -191,6 +191,28 @@ public class TestLoadIncrementalHFiles {
     testRegionCrossingHFileSplit(BloomType.ROWCOL);
   }
 
+  @Test
+  public void testSplitALot() throws Exception {
+    runTest("testSplitALot", BloomType.NONE,
+      new byte[][] {
+        Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"),
+        Bytes.toBytes("ccc"), Bytes.toBytes("ddd"),
+        Bytes.toBytes("eee"), Bytes.toBytes("fff"),
+        Bytes.toBytes("ggg"), Bytes.toBytes("hhh"),
+        Bytes.toBytes("iii"), Bytes.toBytes("lll"),
+        Bytes.toBytes("mmm"), Bytes.toBytes("nnn"),
+        Bytes.toBytes("ooo"), Bytes.toBytes("ppp"),
+        Bytes.toBytes("qqq"), Bytes.toBytes("rrr"),
+        Bytes.toBytes("sss"), Bytes.toBytes("ttt"),
+        Bytes.toBytes("uuu"), Bytes.toBytes("vvv"),
+        Bytes.toBytes("zzz"),
+      },
+      new byte[][][] {
+        new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") },
+      }
+    );
+  }
+
   private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception {
     runTest("testHFileSplit" + bloomType + "Bloom", bloomType,
         new byte[][] {