You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by sy...@apache.org on 2015/10/24 00:49:29 UTC
[09/50] [abbrv] hbase git commit: HBASE-14541 TestHFileOutputFormat.testMRIncrementalLoadWithSplit failed due to too many splits and few retries
HBASE-14541 TestHFileOutputFormat.testMRIncrementalLoadWithSplit failed due to too many splits and few retries
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fb583dd1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fb583dd1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fb583dd1
Branch: refs/heads/hbase-12439
Commit: fb583dd1ea5850f8d826bd39f9f8a61f5053e8e3
Parents: 57fea77
Author: Matteo Bertozzi <ma...@cloudera.com>
Authored: Mon Oct 19 09:22:59 2015 -0700
Committer: Matteo Bertozzi <ma...@cloudera.com>
Committed: Mon Oct 19 09:22:59 2015 -0700
----------------------------------------------------------------------
.../hbase/mapreduce/LoadIncrementalHFiles.java | 7 ++++++-
.../mapreduce/TestLoadIncrementalHFiles.java | 22 ++++++++++++++++++++
2 files changed, 28 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/fb583dd1/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 9ff8a22..7a59ea1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -398,6 +398,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
}
int maxRetries = getConf().getInt("hbase.bulkload.retries.number", 10);
+ maxRetries = Math.max(maxRetries, startEndKeys.getFirst().length + 1);
if (maxRetries != 0 && count >= maxRetries) {
throw new IOException("Retry attempted " + count +
" times without completing, bailing out");
@@ -581,7 +582,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
// We use a '_' prefix which is ignored when walking directory trees
// above.
- final Path tmpDir = new Path(item.hfilePath.getParent(), "_tmp");
+ final String TMP_DIR = "_tmp";
+ Path tmpDir = item.hfilePath.getParent();
+ if (!tmpDir.getName().equals(TMP_DIR)) {
+ tmpDir = new Path(tmpDir, TMP_DIR);
+ }
LOG.info("HFile at " + hfilePath + " no longer fits inside a single " +
"region. Splitting...");
http://git-wip-us.apache.org/repos/asf/hbase/blob/fb583dd1/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 20c64c4..fcf9ef2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -189,6 +189,28 @@ public class TestLoadIncrementalHFiles {
testRegionCrossingHFileSplit(BloomType.ROWCOL);
}
+ @Test
+ public void testSplitALot() throws Exception {
+ runTest("testSplitALot", BloomType.NONE,
+ new byte[][] {
+ Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"),
+ Bytes.toBytes("ccc"), Bytes.toBytes("ddd"),
+ Bytes.toBytes("eee"), Bytes.toBytes("fff"),
+ Bytes.toBytes("ggg"), Bytes.toBytes("hhh"),
+ Bytes.toBytes("iii"), Bytes.toBytes("lll"),
+ Bytes.toBytes("mmm"), Bytes.toBytes("nnn"),
+ Bytes.toBytes("ooo"), Bytes.toBytes("ppp"),
+ Bytes.toBytes("qqq"), Bytes.toBytes("rrr"),
+ Bytes.toBytes("sss"), Bytes.toBytes("ttt"),
+ Bytes.toBytes("uuu"), Bytes.toBytes("vvv"),
+ Bytes.toBytes("zzz"),
+ },
+ new byte[][][] {
+ new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") },
+ }
+ );
+ }
+
private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception {
runTest("testHFileSplit" + bloomType + "Bloom", bloomType,
new byte[][] {