You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2020/08/06 21:50:50 UTC
[hbase] branch master updated: HBASE-24659 Calculate FIXED_OVERHEAD
automatically (#2018)
This is an automated email from the ASF dual-hosted git repository.
stack pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new f710d2d HBASE-24659 Calculate FIXED_OVERHEAD automatically (#2018)
f710d2d is described below
commit f710d2d65463983f4f5c1c1d5d97ab3b36f3ec3a
Author: niuyulin <ny...@163.com>
AuthorDate: Fri Aug 7 05:50:32 2020 +0800
HBASE-24659 Calculate FIXED_OVERHEAD automatically (#2018)
Co-authored-by: niuyulin <ni...@xiaomi.com>
Signed-off-by: Duo Zhang <zh...@apache.org>
Signed-off-by: stack <st...@apache.org>
---
.../apache/hadoop/hbase/io/hfile/HFileContext.java | 8 +----
.../hadoop/hbase/io/hfile/BlockCacheKey.java | 11 ++-----
.../apache/hadoop/hbase/io/hfile/HFileBlock.java | 9 +-----
.../hadoop/hbase/io/hfile/LruBlockCache.java | 34 +++++++++++++---------
.../apache/hadoop/hbase/regionserver/HRegion.java | 7 +----
.../apache/hadoop/hbase/regionserver/HStore.java | 4 +--
.../org/apache/hadoop/hbase/io/TestHeapSize.java | 16 +++++++++-
7 files changed, 43 insertions(+), 46 deletions(-)
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index ea4782d..cfadb6c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -37,13 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class HFileContext implements HeapSize, Cloneable {
- public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
- // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
- 5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
- // usesHBaseChecksum, includesMvcc, includesTags and compressTags
- 4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
- //byte[] headers for column family and table name
- 2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
+ public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);
private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
index 58d5c00..4683c35 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io.hfile;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
/**
@@ -42,7 +41,8 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
this(hfileName, offset, true, BlockType.DATA);
}
- public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) {
+ public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
+ BlockType blockType) {
this.isPrimaryReplicaBlock = isPrimaryReplica;
this.hfileName = hfileName;
this.offset = offset;
@@ -71,12 +71,7 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
return this.hfileName + '_' + this.offset;
}
- public static final long FIXED_OVERHEAD = ClassSize.align(
- ClassSize.OBJECT +
- Bytes.SIZEOF_BOOLEAN +
- ClassSize.REFERENCE + // this.hfileName
- ClassSize.REFERENCE + // this.blockType
- Bytes.SIZEOF_LONG); // this.offset
+ public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);
/**
* Strings have two bytes per character due to default Java Unicode encoding
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 6b14571..f4fdb9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -113,14 +113,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
- public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
- // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
- 5 * ClassSize.REFERENCE +
- // On-disk size, uncompressed size, and next block's on-disk size
- // bytePerChecksum and onDiskDataSize
- 4 * Bytes.SIZEOF_INT +
- // This and previous block offset
- 2 * Bytes.SIZEOF_LONG);
+ public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);
// Block Header fields.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 2978eed..b2016ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -37,7 +37,6 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
@@ -435,7 +434,7 @@ public class LruBlockCache implements FirstLevelBlockCache {
map.put(cacheKey, cb);
long val = elements.incrementAndGet();
if (buf.getBlockType().isData()) {
- dataBlockElements.increment();
+ dataBlockElements.increment();
}
if (LOG.isTraceEnabled()) {
long size = map.size();
@@ -492,7 +491,7 @@ public class LruBlockCache implements FirstLevelBlockCache {
heapsize *= -1;
}
if (bt != null && bt.isData()) {
- dataBlockSize.add(heapsize);
+ dataBlockSize.add(heapsize);
}
return size.addAndGet(heapsize);
}
@@ -578,8 +577,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
int numEvicted = 0;
for (BlockCacheKey key : map.keySet()) {
if (key.getHfileName().equals(hfileName)) {
- if (evictBlock(key))
+ if (evictBlock(key)) {
++numEvicted;
+ }
}
}
if (victimHandler != null) {
@@ -652,7 +652,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
void evict() {
// Ensure only one eviction at a time
- if(!evictionLock.tryLock()) return;
+ if (!evictionLock.tryLock()) {
+ return;
+ }
try {
evictionInProgress = true;
@@ -665,7 +667,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
StringUtils.byteDesc(currentSize));
}
- if (bytesToFree <= 0) return;
+ if (bytesToFree <= 0) {
+ return;
+ }
// Instantiate priority buckets
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@@ -940,7 +944,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
}
}
LruBlockCache cache = this.cache.get();
- if (cache == null) break;
+ if (cache == null) {
+ break;
+ }
cache.evict();
}
}
@@ -1017,10 +1023,8 @@ public class LruBlockCache implements FirstLevelBlockCache {
return this.stats;
}
- public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
- (4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
- (6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
- + ClassSize.OBJECT);
+ public final static long CACHE_FIXED_OVERHEAD =
+ ClassSize.estimateBase(LruBlockCache.class, false);
@Override
public long heapSize() {
@@ -1088,9 +1092,13 @@ public class LruBlockCache implements FirstLevelBlockCache {
@Override
public int compareTo(CachedBlock other) {
int diff = this.getFilename().compareTo(other.getFilename());
- if (diff != 0) return diff;
+ if (diff != 0) {
+ return diff;
+ }
diff = Long.compare(this.getOffset(), other.getOffset());
- if (diff != 0) return diff;
+ if (diff != 0) {
+ return diff;
+ }
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 219252a..ee2ffc4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -8456,12 +8456,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
return cells;
}
- public static final long FIXED_OVERHEAD = ClassSize.align(
- ClassSize.OBJECT +
- 56 * ClassSize.REFERENCE +
- 3 * Bytes.SIZEOF_INT +
- 14 * Bytes.SIZEOF_LONG +
- 3 * Bytes.SIZEOF_BOOLEAN);
+ public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);
// woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 8116507..a05d4a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -2566,9 +2566,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation,
return this.cacheConf;
}
- public static final long FIXED_OVERHEAD =
- ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
- + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
+ public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);
public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
+ ClassSize.OBJECT + ClassSize.REENTRANT_LOCK
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 108de70..3d71305 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -602,5 +602,19 @@ public class TestHeapSize {
assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
}
}
-}
+ @Test
+ public void testAutoCalcFixedOverHead() {
+ Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
+ HFileBlock.class, HStore.class, LruBlockCache.class };
+ for (Class cl : classList) {
+ // do estimate in advance to ensure class is loaded
+ ClassSize.estimateBase(cl, false);
+
+ long startTime = System.currentTimeMillis();
+ ClassSize.estimateBase(cl, false);
+ long endTime = System.currentTimeMillis();
+ assertTrue(endTime - startTime < 5);
+ }
+ }
+}