Posted to commits@hbase.apache.org by ap...@apache.org on 2018/10/16 21:03:37 UTC

[2/8] hbase git commit: HBASE-21263 Mention compression algorithm along with other storefile details

HBASE-21263 Mention compression algorithm along with other storefile details

Signed-off-by: Andrew Purtell <ap...@apache.org>
Amending-Author: Andrew Purtell <ap...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6511dae0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6511dae0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6511dae0

Branch: refs/heads/branch-2
Commit: 6511dae07a5a13c75055defbdca45875e4a35d09
Parents: 85d81fe
Author: subrat.mishra <su...@yahoo.com>
Authored: Mon Oct 15 18:33:48 2018 +0530
Committer: Andrew Purtell <ap...@apache.org>
Committed: Tue Oct 16 12:47:11 2018 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hbase/regionserver/HStore.java   | 9 +++++----
 .../hadoop/hbase/regionserver/compactions/Compactor.java    | 4 +++-
 .../java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java | 8 +++-----
 .../org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java    | 7 ++++---
 .../apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java   | 2 +-
 .../hadoop/hbase/regionserver/CreateRandomStoreFile.java    | 2 +-
 6 files changed, 17 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
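
For readers skimming the diff below: the change appends the data block encoding and/or compression algorithm to several storefile-related log lines, and switches the remaining string-concatenation log statements in the test classes to SLF4J parameterized logging. A minimal, self-contained sketch of that logging pattern follows; the class, logger, and hard-coded values are illustrative assumptions for this note, not code from the commit.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative sketch only: shows the SLF4J "{}" placeholder style used by
    // the updated log lines, with the compression algorithm passed as an extra
    // argument. All values below are made up for the example.
    public class StoreLogExample {
      private static final Logger LOG = LoggerFactory.getLogger(StoreLogExample.class);

      public static void main(String[] args) {
        String columnFamily = "cf";      // hypothetical column family name
        String encoding = "FAST_DIFF";   // hypothetical data block encoding
        String compression = "SNAPPY";   // hypothetical compression algorithm
        long fileSize = 128L * 1024 * 1024;

        // Arguments fill the {} placeholders in order; the message is only
        // formatted when INFO is enabled for this logger.
        LOG.info("Store={}, size={} bytes, encoding={}, compression={}",
            columnFamily, fileSize, encoding, compression);
      }
    }
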


http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index f3565c1..5216fe6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -321,10 +321,11 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
       confPrintThreshold = 10;
     }
     this.parallelPutCountPrintThreshold = confPrintThreshold;
-    LOG.info("Store={},  memstore type={}, storagePolicy={}, verifyBulkLoads={}, " +
-            "parallelPutCountPrintThreshold={}", getColumnFamilyName(),
-        this.memstore.getClass().getSimpleName(), policyName,
-        this.verifyBulkLoads, this.parallelPutCountPrintThreshold);
+    LOG.info("Store={},  memstore type={}, storagePolicy={}, verifyBulkLoads={}, "
+            + "parallelPutCountPrintThreshold={}, encoding={}, compression={}",
+        getColumnFamilyName(), memstore.getClass().getSimpleName(), policyName, verifyBulkLoads,
+        parallelPutCountPrintThreshold, family.getDataBlockEncoding(),
+        family.getCompressionType());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index d6e5510..83690a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -199,12 +199,14 @@ public abstract class Compactor<T extends CellSink> {
       }
       tmp = fileInfo.get(TIMERANGE_KEY);
       fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP: TimeRangeTracker.parseFrom(tmp).getMax();
-      LOG.debug("Compacting {}, keycount={}, bloomtype={}, size={}, encoding={}, seqNum={}{}",
+      LOG.debug("Compacting {}, keycount={}, bloomtype={}, size={}, "
+              + "encoding={}, compression={}, seqNum={}{}",
           (file.getPath() == null? null: file.getPath().getName()),
           keyCount,
           r.getBloomFilterType().toString(),
           TraditionalBinaryPrefix.long2String(r.length(), "", 1),
           r.getHFileReader().getDataBlockEncoding(),
+          compactionCompression,
           seqNum,
           (allFiles? ", earliestPutTs=" + earliestPutTs: ""));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
index dd8ebb3..de28422 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
@@ -299,11 +299,9 @@ public class TestChecksum {
         long expectedChunks = ChecksumUtil.numChunks(
                                dataSize + HConstants.HFILEBLOCK_HEADER_SIZE,
                                bytesPerChecksum);
-        LOG.info("testChecksumChunks: pread=" + pread +
-                   ", bytesPerChecksum=" + bytesPerChecksum +
-                   ", fileSize=" + totalSize +
-                   ", dataSize=" + dataSize +
-                   ", expectedChunks=" + expectedChunks);
+        LOG.info("testChecksumChunks: pread={}, bytesPerChecksum={}, fileSize={}, "
+                + "dataSize={}, expectedChunks={}, compression={}", pread, bytesPerChecksum,
+            totalSize, dataSize, expectedChunks, algo.toString());
 
         // Verify hbase checksums.
         assertEquals(true, hfs.useHBaseChecksum());

http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index f4e6696..a588341 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -376,6 +376,8 @@ public class TestHFileBlock {
     for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
       for (boolean pread : new boolean[] { false, true }) {
         for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
+          LOG.info("testDataBlockEncoding: Compression algorithm={}, pread={}, dataBlockEncoder={}",
+              algo.toString(), pread, encoding);
           Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_"
               + algo + "_" + encoding.toString());
           FSDataOutputStream os = fs.create(path);
@@ -534,9 +536,8 @@ public class TestHFileBlock {
       for (boolean pread : BOOLEAN_VALUES) {
         for (boolean cacheOnWrite : BOOLEAN_VALUES) {
           Random rand = defaultRandom();
-          LOG.info("testPreviousOffset:Compression algorithm: " + algo +
-                   ", pread=" + pread +
-                   ", cacheOnWrite=" + cacheOnWrite);
+          LOG.info("testPreviousOffset: Compression algorithm={}, pread={}, cacheOnWrite={}",
+              algo.toString(), pread, cacheOnWrite);
           Path path = new Path(TEST_UTIL.getDataTestDir(), "prev_offset");
           List<Long> expectedOffsets = new ArrayList<>();
           List<Long> expectedPrevOffsets = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index be35c74..890ea72 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -202,7 +202,7 @@ public class TestHFileBlockIndex {
 
   private void readIndex(boolean useTags) throws IOException {
     long fileSize = fs.getFileStatus(path).getLen();
-    LOG.info("Size of " + path + ": " + fileSize);
+    LOG.info("Size of {}: {} compression={}", path, fileSize, compr.toString());
 
     FSDataInputStream istream = fs.open(path);
     HFileContext meta = new HFileContextBuilder()

http://git-wip-us.apache.org/repos/asf/hbase/blob/6511dae0/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
index 21c6d6e..8a512e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
@@ -232,7 +232,7 @@ public class CreateRandomStoreFile {
 
     Path storeFilePath = sfw.getPath();
     long fileSize = fs.getFileStatus(storeFilePath).getLen();
-    LOG.info("Created " + storeFilePath + ", " + fileSize + " bytes");
+    LOG.info("Created {}, {} bytes, compression={}", storeFilePath, fileSize, compr.toString());
 
     return true;
   }
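
As a rough illustration of the net effect, the store-open line from HStore.java would now read along these lines at runtime; every value below is an invented placeholder, not output from a real cluster:

    INFO  [main] regionserver.HStore: Store=cf,  memstore type=DefaultMemStore, storagePolicy=HOT, verifyBulkLoads=false, parallelPutCountPrintThreshold=10, encoding=NONE, compression=SNAPPY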