You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by zg...@apache.org on 2020/04/02 15:30:38 UTC
[hbase] branch branch-2.2 updated: HBASE-24021 Fail fast when the
bulkLoadHFiles method catches an IOException (#1343)
This is an automated email from the ASF dual-hosted git repository.
zghao pushed a commit to branch branch-2.2
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.2 by this push:
new b1009e0 HBASE-24021 Fail fast when the bulkLoadHFiles method catches an IOException (#1343)
b1009e0 is described below
commit b1009e041b148c8faca6afc7c7ab22129c3fa87b
Author: niuyulin <ny...@163.com>
AuthorDate: Thu Apr 2 23:15:14 2020 +0800
HBASE-24021 Fail fast when the bulkLoadHFiles method catches an IOException (#1343)
Signed-off-by: Guanghao Zhang <zg...@apache.org>
---
.../apache/hadoop/hbase/regionserver/HRegion.java | 26 +++++++++-------------
.../hadoop/hbase/regionserver/TestBulkLoad.java | 17 ++++++++++++++
2 files changed, 28 insertions(+), 15 deletions(-)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 467b49f..bd6d311 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -177,7 +177,6 @@ import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALSplitUtil;
import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay;
-import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
@@ -6164,8 +6163,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @throws IOException if failed unrecoverably.
*/
public Map<byte[], List<Path>> bulkLoadHFiles(Collection<Pair<byte[], String>> familyPaths,
- boolean assignSeqId, BulkLoadListener bulkLoadListener,
- boolean copyFile, List<String> clusterIds, boolean replicate) throws IOException {
+ boolean assignSeqId, BulkLoadListener bulkLoadListener, boolean copyFile,
+ List<String> clusterIds, boolean replicate) throws IOException {
long seqId = -1;
Map<byte[], List<Path>> storeFiles = new TreeMap<>(Bytes.BYTES_COMPARATOR);
Map<String, Long> storeFilesSizes = new HashMap<>();
@@ -6177,9 +6176,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
this.writeRequestsCount.increment();
// There possibly was a split that happened between when the split keys
- // were gathered and before the HRegion's write lock was taken. We need
+ // were gathered and before the HRegion's write lock was taken. We need
// to validate the HFile region before attempting to bulk load all of them
- List<IOException> ioes = new ArrayList<>();
+ IOException ioException = null;
List<Pair<byte[], String>> failures = new ArrayList<>();
for (Pair<byte[], String> p : familyPaths) {
byte[] familyName = p.getFirst();
@@ -6187,9 +6186,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
HStore store = getStore(familyName);
if (store == null) {
- IOException ioe = new org.apache.hadoop.hbase.DoNotRetryIOException(
+ ioException = new org.apache.hadoop.hbase.DoNotRetryIOException(
"No such column family " + Bytes.toStringBinary(familyName));
- ioes.add(ioe);
} else {
try {
store.assertBulkLoadHFileOk(new Path(path));
@@ -6198,18 +6196,16 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
failures.add(p);
} catch (IOException ioe) {
// unrecoverable (hdfs problem)
- ioes.add(ioe);
+ ioException = ioe;
}
}
- }
- // validation failed because of some sort of IO problem.
- if (ioes.size() != 0) {
- IOException e = MultipleIOException.createIOException(ioes);
- LOG.error("There were one or more IO errors when checking if the bulk load is ok.", e);
- throw e;
+ // validation failed because of some sort of IO problem.
+ if (ioException != null) {
+ LOG.error("There was IO error when checking if the bulk load is ok.", ioException);
+ throw ioException;
+ }
}
-
// validation failed, bail out before doing anything permanent.
if (failures.size() != 0) {
StringBuilder list = new StringBuilder();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index 0e3fac9..0bc9df3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -93,6 +93,7 @@ public class TestBulkLoad {
private final byte[] randomBytes = new byte[100];
private final byte[] family1 = Bytes.toBytes("family1");
private final byte[] family2 = Bytes.toBytes("family2");
+ private final byte[] family3 = Bytes.toBytes("family3");
@Rule
public TestName name = new TestName();
@@ -202,6 +203,13 @@ public class TestBulkLoad {
null);
}
+ // after HBASE-24021 will throw DoNotRetryIOException, not MultipleIOException
+ @Test(expected = DoNotRetryIOException.class)
+ public void shouldCrashIfBulkLoadMultiFamiliesNotInTable() throws IOException {
+ testRegionWithFamilies(family1).bulkLoadHFiles(withFamilyPathsFor(family1, family2, family3),
+ false, null);
+ }
+
@Test(expected = DoNotRetryIOException.class)
public void bulkHLogShouldThrowErrorWhenFamilySpecifiedAndHFileExistsButNotInTableDescriptor()
throws IOException {
@@ -221,6 +229,15 @@ public class TestBulkLoad {
testRegionWithFamilies(family1).bulkLoadHFiles(list, false, null);
}
+ // after HBASE-24021 will throw FileNotFoundException, not MultipleIOException
+ @Test(expected = FileNotFoundException.class)
+ public void shouldThrowErrorIfMultiHFileDoesNotExist() throws IOException {
+ List<Pair<byte[], String>> list = new ArrayList<>();
+ list.addAll(asList(withMissingHFileForFamily(family1)));
+ list.addAll(asList(withMissingHFileForFamily(family2)));
+ testRegionWithFamilies(family1, family2).bulkLoadHFiles(list, false, null);
+ }
+
private Pair<byte[], String> withMissingHFileForFamily(byte[] family) {
return new Pair<>(family, getNotExistFilePath());
}