You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by op...@apache.org on 2019/05/31 07:15:25 UTC
[hbase] 15/17: HBASE-22435 Add a UT to address the
HFileBlock#heapSize() in TestHeapSize
This is an automated email from the ASF dual-hosted git repository.
openinx pushed a commit to branch HBASE-21879
in repository https://gitbox.apache.org/repos/asf/hbase.git
commit 812042d895b75d952fa40b9d4dc1305346d6b973
Author: huzheng <op...@gmail.com>
AuthorDate: Tue May 21 15:47:19 2019 +0800
HBASE-22435 Add a UT to address the HFileBlock#heapSize() in TestHeapSize
---
.../apache/hadoop/hbase/io/hfile/HFileContext.java | 17 ++++++++-------
.../apache/hadoop/hbase/io/hfile/HFileBlock.java | 24 +++++++++-------------
.../org/apache/hadoop/hbase/io/TestHeapSize.java | 16 +++++++++++++++
3 files changed, 34 insertions(+), 23 deletions(-)
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index b5ccda2..6074f10 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -34,6 +34,11 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class HFileContext implements HeapSize, Cloneable {
+ public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+ // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference
+ 5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
+ // usesHBaseChecksum, includesMvcc, includesTags and compressTags
+ 4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG);
public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
@@ -188,19 +193,13 @@ public class HFileContext implements HeapSize, Cloneable {
}
/**
- * HeapSize implementation
- * NOTE : The heapsize should be altered as and when new state variable are added
+ * HeapSize implementation. NOTE : The heapsize should be altered as and when new state variables
+ * are added
* @return heap size of the HFileContext
*/
@Override
public long heapSize() {
- long size = ClassSize.align(ClassSize.OBJECT +
- // Algorithm reference, encodingon, checksumtype, Encryption.Context reference
- 5 * ClassSize.REFERENCE +
- 2 * Bytes.SIZEOF_INT +
- // usesHBaseChecksum, includesMvcc, includesTags and compressTags
- 4 * Bytes.SIZEOF_BOOLEAN +
- Bytes.SIZEOF_LONG);
+ long size = FIXED_OVERHEAD;
if (this.hfileName != null) {
size += ClassSize.STRING + this.hfileName.length();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 92dcf44..846460f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -113,6 +113,14 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
+ public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+ // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
+ 5 * ClassSize.REFERENCE +
+ // On-disk size, uncompressed size, and next block's on-disk size
+ // bytePerChecksum and onDiskDataSize
+ 4 * Bytes.SIZEOF_INT +
+ // This and previous block offset
+ 2 * Bytes.SIZEOF_LONG);
// Block Header fields.
@@ -739,24 +747,12 @@ public class HFileBlock implements Cacheable {
@Override
public long heapSize() {
- long size = ClassSize.align(
- ClassSize.OBJECT +
- // Block type, multi byte buffer, MemoryType and meta references
- 4 * ClassSize.REFERENCE +
- // On-disk size, uncompressed size, and next block's on-disk size
- // bytePerChecksum and onDiskDataSize
- 4 * Bytes.SIZEOF_INT +
- // This and previous block offset
- 2 * Bytes.SIZEOF_LONG +
- // Heap size of the meta object. meta will be always not null.
- fileContext.heapSize()
- );
-
+ long size = FIXED_OVERHEAD;
+ size += fileContext.heapSize();
if (buf != null) {
// Deep overhead of the byte buffer. Needs to be aligned separately.
size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);
}
-
return ClassSize.align(size);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 993503d..71ffb87 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -43,6 +43,8 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.HFileBlock;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
import org.apache.hadoop.hbase.regionserver.CSLMImmutableSegment;
@@ -516,6 +518,20 @@ public class TestHeapSize {
}
@Test
+ public void testHFileBlockSize() throws IOException {
+ long expected;
+ long actual;
+
+ actual = HFileContext.FIXED_OVERHEAD;
+ expected = ClassSize.estimateBase(HFileContext.class, false);
+ assertEquals(expected, actual);
+
+ actual = HFileBlock.FIXED_OVERHEAD;
+ expected = ClassSize.estimateBase(HFileBlock.class, false);
+ assertEquals(expected, actual);
+ }
+
+ @Test
public void testMutations(){
Class<?> cl;
long expected;