You are viewing a plain-text version of this content; the canonical (HTML) link was not preserved in this extraction.
Posted to commits@hbase.apache.org by op...@apache.org on 2018/10/22 01:44:07 UTC
hbase git commit: HBASE-21355 HStore's storeSize is calculated
repeatedly, which causes confusing region splits
Repository: hbase
Updated Branches:
refs/heads/master 7d7293049 -> 77ac352a9
HBASE-21355 HStore's storeSize is calculated repeatedly, which causes confusing region splits
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/77ac352a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/77ac352a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/77ac352a
Branch: refs/heads/master
Commit: 77ac352a9510ac3806cf234358b31b6ab6db5dae
Parents: 7d72930
Author: huzheng <op...@gmail.com>
Authored: Sun Oct 21 16:19:48 2018 +0800
Committer: huzheng <op...@gmail.com>
Committed: Mon Oct 22 09:43:14 2018 +0800
----------------------------------------------------------------------
.../hadoop/hbase/regionserver/HStore.java | 31 ++++++++--------
.../apache/hadoop/hbase/client/TestAdmin2.java | 39 ++++++++++++++++++++
2 files changed, 54 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/77ac352a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 436e967..694b2b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -302,7 +302,10 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
}
this.storeEngine = createStoreEngine(this, this.conf, this.comparator);
- this.storeEngine.getStoreFileManager().loadFiles(loadStoreFiles());
+ List<HStoreFile> hStoreFiles = loadStoreFiles();
+ this.storeSize.addAndGet(getStorefilesSize(hStoreFiles, sf -> true));
+ this.totalUncompressedBytes.addAndGet(getTotalUmcompressedBytes(hStoreFiles));
+ this.storeEngine.getStoreFileManager().loadFiles(hStoreFiles);
// Initialize checksum type from name. The names are CRC32, CRC32C, etc.
this.checksumType = getChecksumType(conf);
@@ -576,10 +579,6 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
try {
HStoreFile storeFile = completionService.take().get();
if (storeFile != null) {
- long length = storeFile.getReader().length();
- this.storeSize.addAndGet(length);
- this.totalUncompressedBytes
- .addAndGet(storeFile.getReader().getTotalUncompressedBytes());
LOG.debug("loaded {}", storeFile);
results.add(storeFile);
}
@@ -2158,24 +2157,24 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
@Override
public long getStorefilesSize() {
// Include all StoreFiles
- return getStorefilesSize(storeFile -> true);
+ return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(), sf -> true);
}
@Override
public long getHFilesSize() {
// Include only StoreFiles which are HFiles
- return getStorefilesSize(storeFile -> storeFile.isHFile());
+ return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(),
+ HStoreFile::isHFile);
}
- private long getStorefilesSize(Predicate<HStoreFile> predicate) {
- return this.storeEngine.getStoreFileManager().getStorefiles().stream().filter(sf -> {
- if (sf.getReader() == null) {
- LOG.warn("StoreFile {} has a null Reader", sf);
- return false;
- } else {
- return true;
- }
- }).filter(predicate).mapToLong(sf -> sf.getReader().length()).sum();
+ private long getTotalUmcompressedBytes(List<HStoreFile> files) {
+ return files.stream().filter(f -> f != null && f.getReader() != null)
+ .mapToLong(f -> f.getReader().getTotalUncompressedBytes()).sum();
+ }
+
+ private long getStorefilesSize(Collection<HStoreFile> files, Predicate<HStoreFile> predicate) {
+ return files.stream().filter(f -> f != null && f.getReader() != null).filter(predicate)
+ .mapToLong(f -> f.getReader().length()).sum();
}
private long getStoreFileFieldSize(ToLongFunction<StoreFileReader> f) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/77ac352a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 6fc59be..4ab1a8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -48,10 +48,12 @@ import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.constraint.ConstraintException;
+import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -59,6 +61,7 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.junit.After;
import org.junit.AfterClass;
+import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -754,4 +757,40 @@ public class TestAdmin2 {
}
}
}
+
+ /**
+ * TestCase for HBASE-21355
+ */
+ @Test
+ public void testGetRegionInfo() throws Exception {
+ final TableName tableName = TableName.valueOf(name.getMethodName());
+ Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("f"));
+ for (int i = 0; i < 100; i++) {
+ table.put(new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"),
+ Bytes.toBytes(i)));
+ }
+ admin.flush(tableName);
+
+ HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(table.getName());
+ List<HRegion> regions = rs.getRegions(tableName);
+ Assert.assertEquals(1, regions.size());
+
+ HRegion region = regions.get(0);
+ byte[] regionName = region.getRegionInfo().getRegionName();
+ HStore store = region.getStore(Bytes.toBytes("f"));
+ long expectedStoreFilesSize = store.getStorefilesSize();
+ Assert.assertNotNull(store);
+ Assert.assertEquals(expectedStoreFilesSize, store.getSize());
+
+ ClusterConnection conn = ((ClusterConnection) admin.getConnection());
+ HBaseRpcController controller = conn.getRpcControllerFactory().newController();
+ for (int i = 0; i < 10; i++) {
+ RegionInfo ri =
+ ProtobufUtil.getRegionInfo(controller, conn.getAdmin(rs.getServerName()), regionName);
+ Assert.assertEquals(region.getRegionInfo(), ri);
+
+ // Make sure that the store size is still the actual file system's store size.
+ Assert.assertEquals(expectedStoreFilesSize, store.getSize());
+ }
+ }
}