You are viewing a plain text version of this content. The canonical link is available in the original HTML version of this page.
Posted to commits@hbase.apache.org by zh...@apache.org on 2023/06/22 13:53:30 UTC
[hbase] branch branch-3 updated: HBASE-27936 NPE in StoreFileReader.passesGeneralRowPrefixBloomFilter() (#5300)
This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-3 by this push:
new 668af8b945c HBASE-27936 NPE in StoreFileReader.passesGeneralRowPrefixBloomFilter() (#5300)
668af8b945c is described below
commit 668af8b945c2f4fc3a1bb25db15db3c5984dec8f
Author: Duo Zhang <zh...@apache.org>
AuthorDate: Thu Jun 22 21:33:33 2023 +0800
HBASE-27936 NPE in StoreFileReader.passesGeneralRowPrefixBloomFilter() (#5300)
Need to also copy bloomFilterMetrics in StoreFileReader.copyFields
Signed-off-by: Viraj Jasani <vj...@apache.org>
(cherry picked from commit da171c341eab8adebab756743243d94477a9074f)
---
.../hadoop/hbase/regionserver/StoreFileReader.java | 14 +-
.../hadoop/hbase/regionserver/StoreFileWriter.java | 3 +-
.../hadoop/hbase/regionserver/RegionAsTable.java | 47 ++---
.../hbase/regionserver/TestBloomFilterFaulty.java | 200 +++++++++++++++++++++
4 files changed, 235 insertions(+), 29 deletions(-)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
index dc8b06200ae..72e93c3f75a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
@@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.regionserver.HStoreFile.BLOOM_FILTER_TYPE_
import static org.apache.hadoop.hbase.regionserver.HStoreFile.DELETE_FAMILY_COUNT;
import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY;
+import com.google.errorprone.annotations.RestrictedApi;
import java.io.DataInput;
import java.io.IOException;
import java.util.Map;
@@ -103,6 +104,7 @@ public class StoreFileReader {
this.generalBloomFilter = storeFileReader.generalBloomFilter;
this.deleteFamilyBloomFilter = storeFileReader.deleteFamilyBloomFilter;
this.bloomFilterType = storeFileReader.bloomFilterType;
+ this.bloomFilterMetrics = storeFileReader.bloomFilterMetrics;
this.sequenceID = storeFileReader.sequenceID;
this.timeRange = storeFileReader.timeRange;
this.lastBloomKey = storeFileReader.lastBloomKey;
@@ -496,7 +498,9 @@ public class StoreFileReader {
return fi;
}
- public void loadBloomfilter() {
+ @RestrictedApi(explanation = "Should only be called in tests", link = "",
+ allowedOnPath = ".*/src/test/.*")
+ void loadBloomfilter() {
this.loadBloomfilter(BlockType.GENERAL_BLOOM_META, null);
this.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META, null);
}
@@ -546,7 +550,9 @@ public class StoreFileReader {
}
}
- private void setBloomFilterFaulty(BlockType blockType) {
+ @RestrictedApi(explanation = "Should only be called in tests", link = "",
+ allowedOnPath = ".*/StoreFileReader.java|.*/src/test/.*")
+ void setBloomFilterFaulty(BlockType blockType) {
if (blockType == BlockType.GENERAL_BLOOM_META) {
setGeneralBloomFilterFaulty();
} else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
@@ -563,11 +569,11 @@ public class StoreFileReader {
return generalBloomFilter != null ? generalBloomFilter.getKeyCount() : reader.getEntries();
}
- public void setGeneralBloomFilterFaulty() {
+ private void setGeneralBloomFilterFaulty() {
generalBloomFilter = null;
}
- public void setDeleteFamilyBloomFilterFaulty() {
+ private void setDeleteFamilyBloomFilterFaulty() {
this.deleteFamilyBloomFilter = null;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index b76867d1c22..17e0001fb0c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -154,8 +154,7 @@ public class StoreFileWriter implements CellSink, ShipperListener {
this.bloomType = BloomType.NONE;
}
- // initialize delete family Bloom filter when there is NO RowCol Bloom
- // filter
+ // initialize delete family Bloom filter when there is NO RowCol Bloom filter
if (this.bloomType != BloomType.ROWCOL) {
this.deleteFamilyBloomFilterWriter = BloomFilterFactory.createDeleteBloomAtWrite(conf,
cacheConf, (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
index 951f2e8d53f..e0d4981a7f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@@ -129,39 +128,41 @@ public class RegionAsTable implements Table {
}
static class RegionScannerToResultScannerAdaptor implements ResultScanner {
- private static final Result[] EMPTY_RESULT_ARRAY = new Result[0];
- private final RegionScanner regionScanner;
- RegionScannerToResultScannerAdaptor(final RegionScanner regionScanner) {
- this.regionScanner = regionScanner;
- }
+ private final RegionScanner scanner;
- @Override
- public Iterator<Result> iterator() {
- throw new UnsupportedOperationException();
- }
+ private boolean moreRows = true;
- @Override
- public Result next() throws IOException {
- List<Cell> cells = new ArrayList<>();
- return regionScanner.next(cells) ? Result.create(cells) : null;
+ private final List<Cell> cells = new ArrayList<>();
+
+ RegionScannerToResultScannerAdaptor(final RegionScanner scanner) {
+ this.scanner = scanner;
}
@Override
- public Result[] next(int nbRows) throws IOException {
- List<Result> results = new ArrayList<>(nbRows);
- for (int i = 0; i < nbRows; i++) {
- Result result = next();
- if (result == null) break;
- results.add(result);
+ public Result next() throws IOException {
+ if (!moreRows) {
+ return null;
+ }
+ for (;;) {
+ moreRows = scanner.next(cells);
+ if (cells.isEmpty()) {
+ if (!moreRows) {
+ return null;
+ } else {
+ continue;
+ }
+ }
+ Result result = Result.create(cells);
+ cells.clear();
+ return result;
}
- return results.toArray(EMPTY_RESULT_ARRAY);
}
@Override
public void close() {
try {
- regionScanner.close();
+ scanner.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -174,7 +175,7 @@ public class RegionAsTable implements Table {
@Override
public ScanMetrics getScanMetrics() {
- throw new UnsupportedOperationException();
+ return null;
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBloomFilterFaulty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBloomFilterFaulty.java
new file mode 100644
index 00000000000..3aac6b0f10f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBloomFilterFaulty.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Scan.ReadType;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.io.hfile.BlockType;
+import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * A UT to make sure that everything is fine when we fail to load bloom filter.
+ * <p>
+ * See HBASE-27936 for more details.
+ */
+@Category({ RegionServerTests.class, MediumTests.class })
+public class TestBloomFilterFaulty {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestBloomFilterFaulty.class);
+
+ private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
+
+ private static final byte[] FAMILY = Bytes.toBytes("family");
+
+ private static final byte[] QUAL = Bytes.toBytes("qualifier");
+
+ private static final TableDescriptor TD =
+ TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
+ .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
+ .setConfiguration("RowPrefixBloomFilter.prefix_length", "2").build())
+ .build();
+
+ private static final RegionInfo RI = RegionInfoBuilder.newBuilder(TD.getTableName()).build();
+
+ @AfterClass
+ public static void tearDownAfterClass() {
+ UTIL.cleanupTestDir();
+ }
+
+ private HRegion region;
+
+ @Rule
+ public final TestName name = new TestName();
+
+ private void generateHFiles() throws IOException {
+ for (int i = 0; i < 4; i++) {
+ long ts = EnvironmentEdgeManager.currentTime();
+ for (int j = 0; j < 5; j++) {
+ byte[] row = Bytes.toBytes(j);
+ region.put(new Put(row).addColumn(FAMILY, QUAL, ts, Bytes.toBytes(i * 10 + j)));
+ region.delete(new Delete(row).addFamilyVersion(FAMILY, ts));
+ }
+
+ for (int j = 5; j < 10; j++) {
+ byte[] row = Bytes.toBytes(j);
+ region.put(new Put(row).addColumn(FAMILY, QUAL, ts + 1, Bytes.toBytes(i * 10 + j)));
+ }
+
+ FlushResult result = region.flush(true);
+ if (
+ result.getResult() == FlushResult.Result.CANNOT_FLUSH
+ || result.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY
+ ) {
+ throw new IOException("Can not flush region, flush result: " + result);
+ }
+ }
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ Path rootDir = UTIL.getDataTestDir(name.getMethodName());
+ // generate some hfiles so we can have StoreFileReader which has bloomfilters
+ region = HBaseTestingUtil.createRegionAndWAL(RI, rootDir, UTIL.getConfiguration(), TD);
+ generateHFiles();
+ HStore store = region.getStore(FAMILY);
+ for (HStoreFile storefile : store.getStorefiles()) {
+ storefile.initReader();
+ StoreFileReader reader = storefile.getReader();
+ // make sure we load bloom filters correctly
+ assertNotNull(reader.generalBloomFilter);
+ assertNotNull(reader.deleteFamilyBloomFilter);
+ }
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ if (region != null) {
+ HBaseTestingUtil.closeRegionAndWAL(region);
+ }
+ }
+
+ private void setFaulty(BlockType type) {
+ HStore store = region.getStore(FAMILY);
+ for (HStoreFile storefile : store.getStorefiles()) {
+ storefile.getReader().setBloomFilterFaulty(type);
+ }
+ }
+
+ private void testGet() throws IOException {
+ for (int i = 0; i < 5; i++) {
+ assertTrue(region.get(new Get(Bytes.toBytes(i))).isEmpty());
+ }
+ for (int i = 5; i < 10; i++) {
+ assertEquals(30 + i,
+ Bytes.toInt(region.get(new Get(Bytes.toBytes(i))).getValue(FAMILY, QUAL)));
+ }
+ }
+
+ private void testStreamScan() throws IOException {
+ try (RegionAsTable table = new RegionAsTable(region);
+ ResultScanner scanner = table.getScanner(new Scan().setReadType(ReadType.STREAM))) {
+ for (int i = 5; i < 10; i++) {
+ Result result = scanner.next();
+ assertEquals(i, Bytes.toInt(result.getRow()));
+ assertEquals(30 + i, Bytes.toInt(result.getValue(FAMILY, QUAL)));
+ }
+ assertNull(scanner.next());
+ }
+ }
+
+ private void testRegion() throws IOException {
+ // normal read
+ testGet();
+ // scan with stream reader
+ testStreamScan();
+ // major compact
+ region.compact(true);
+ // test read and scan again
+ testGet();
+ testStreamScan();
+ }
+
+ @Test
+ public void testNoGeneralBloomFilter() throws IOException {
+ setFaulty(BlockType.GENERAL_BLOOM_META);
+ testRegion();
+ }
+
+ @Test
+ public void testNoDeleteFamilyBloomFilter() throws IOException {
+ setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
+ testRegion();
+ }
+
+ @Test
+ public void testNoAnyBloomFilter() throws IOException {
+ setFaulty(BlockType.GENERAL_BLOOM_META);
+ setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
+ testRegion();
+ }
+}