Posted to commits@hbase.apache.org by ap...@apache.org on 2015/10/02 04:42:18 UTC

[1/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Repository: hbase
Updated Branches:
  refs/heads/0.98 1ae5c0450 -> f4bd1313c
  refs/heads/branch-1 7aaef0f92 -> 9b297493e
  refs/heads/branch-1.0 fba3f60be -> dfc61c6ec
  refs/heads/branch-1.1 7654fcd6f -> fe40a4b78
  refs/heads/branch-1.2 528a56b40 -> eab91be42
  refs/heads/master ef7001c0e -> 030ae5f04


HBASE-13143 TestCacheOnWrite is flaky and needs a diet


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/030ae5f0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/030ae5f0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/030ae5f0

Branch: refs/heads/master
Commit: 030ae5f0415b97e5da688c1432ed53fd56990194
Parents: ef7001c
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 18:34:11 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/030ae5f0/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index bfa5b87..2c957ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -93,8 +93,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -140,42 +138,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", cacheCompressedData=" + cacheCompressedData + "]";
     LOG.info(testDescription);
   }
 
@@ -201,20 +173,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -280,7 +249,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -292,8 +262,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     List<Long> cachedBlocksOffset = new ArrayList<Long>();
     Map<Long, HFileBlock> cachedBlocks = new HashMap<Long, HFileBlock>();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
@@ -331,10 +300,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -349,13 +315,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -407,7 +371,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(CellComparator.COMPARATOR)
@@ -454,7 +419,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTime();

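The diet here is combinatorial: dropping the three-valued BlockEncoderTestType dimension from getParameters() cuts the parameterized matrix to a third of its former size. A back-of-the-envelope sketch in Java, assuming two block caches from getBlockCaches() and two entries in HBaseTestingUtility.COMPRESSION_ALGORITHMS (the real factors depend on the runtime environment):

    public class ParameterMatrixSketch {
      public static void main(String[] args) {
        int blockCaches = 2;      // assumed size of getBlockCaches()
        int cowTypes = 3;         // CacheOnWriteType.values().length
        int compressions = 2;     // assumed: { NONE, GZ }
        int encoderTypes = 3;     // BlockEncoderTestType.values().length, now removed
        int cacheCompressed = 2;  // { false, true }

        int before = blockCaches * cowTypes * compressions * encoderTypes * cacheCompressed;
        int after  = blockCaches * cowTypes * compressions * cacheCompressed;
        // With the assumed factors: 72 parameter sets before, 24 after.
        System.out.println(before + " -> " + after);
      }
    }

Each remaining combination now runs once instead of three times.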

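The same rework is backported in the posts that follow. With the encoder dimension gone, the test pins the cache encoding to the no-op encoder; a minimal sketch of the invariant the reworked assertions rely on, assuming NoOpDataBlockEncoder.INSTANCE reports DataBlockEncoding.NONE for an unencoded file:

    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;

    public class NoOpEncodingSketch {
      public static void main(String[] args) {
        // The patched test derives encodingInCache from the no-op encoder
        // rather than from a parameterized BlockEncoderTestType.
        DataBlockEncoding encodingInCache =
            NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
        // With no block encoding in play, no block should carry
        // BlockType.ENCODED_DATA, which is why the branch comparing
        // encoding ids collapses to a single assertNotEquals.
        System.out.println(encodingInCache); // expected: NONE
      }
    }
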
[5/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Posted by ap...@apache.org.
HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dfc61c6e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dfc61c6e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dfc61c6e

Branch: refs/heads/branch-1.0
Commit: dfc61c6ec6545a5eea51a0006eb5fdfac92edb8c
Parents: fba3f60
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 19:02:45 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/dfc61c6e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index c7017e2..c9590f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -87,8 +87,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -134,42 +132,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", cacheCompressedData=" + cacheCompressedData + "]";
     LOG.info(testDescription);
   }
 
@@ -195,20 +167,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -279,7 +248,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -291,8 +261,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       long onDiskSize = -1;
       if (prevBlock != null) {
@@ -326,10 +295,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -344,13 +310,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -385,7 +349,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
@@ -437,7 +402,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTime();


[4/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Posted by ap...@apache.org.
HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fe40a4b7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fe40a4b7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fe40a4b7

Branch: refs/heads/branch-1.1
Commit: fe40a4b7808cc45d68137486e9f9ce553f037cde
Parents: 7654fcd
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 19:02:42 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/fe40a4b7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index a025e6c..0562f4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -88,8 +88,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -135,42 +133,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", cacheCompressedData=" + cacheCompressedData + "]";
     LOG.info(testDescription);
   }
 
@@ -196,20 +168,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -280,7 +249,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -292,8 +262,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       long onDiskSize = -1;
       if (prevBlock != null) {
@@ -327,10 +296,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -345,13 +311,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -386,7 +350,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
@@ -438,7 +403,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTime();


[3/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Posted by ap...@apache.org.
HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eab91be4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eab91be4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eab91be4

Branch: refs/heads/branch-1.2
Commit: eab91be4295e412ef9be0965fb3638a182fef3e1
Parents: 528a56b
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 19:02:38 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/eab91be4/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index d2bfa7e..91a00e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -88,8 +88,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -135,42 +133,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", cacheCompressedData=" + cacheCompressedData + "]";
     LOG.info(testDescription);
   }
 
@@ -196,20 +168,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -281,7 +250,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -293,8 +263,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       long onDiskSize = -1;
       if (prevBlock != null) {
@@ -328,10 +297,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -346,13 +312,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -387,7 +351,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
@@ -439,7 +404,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTime();


[2/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Posted by ap...@apache.org.
HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9b297493
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9b297493
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9b297493

Branch: refs/heads/branch-1
Commit: 9b297493e2d87bfd3f93005fe16d26cdf847b0c3
Parents: 7aaef0f
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 18:45:48 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/9b297493/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index d2bfa7e..91a00e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -88,8 +88,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -135,42 +133,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", cacheCompressedData=" + cacheCompressedData + "]";
     LOG.info(testDescription);
   }
 
@@ -196,20 +168,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -281,7 +250,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -293,8 +263,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       long onDiskSize = -1;
       if (prevBlock != null) {
@@ -328,10 +297,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -346,13 +312,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -387,7 +351,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
@@ -439,7 +404,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTime();


[6/6] hbase git commit: HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Posted by ap...@apache.org.
HBASE-13143 TestCacheOnWrite is flaky and needs a diet

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f4bd1313
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f4bd1313
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f4bd1313

Branch: refs/heads/0.98
Commit: f4bd1313c20645f7a145f55cf147a29f594b030b
Parents: 1ae5c04
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Oct 1 16:29:54 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Oct 1 19:09:11 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 67 +++++---------------
 1 file changed, 16 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/f4bd1313/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index d7f18dd..70cb569 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -87,8 +87,6 @@ public class TestCacheOnWrite {
 
   private final CacheOnWriteType cowType;
   private final Compression.Algorithm compress;
-  private final BlockEncoderTestType encoderType;
-  private final HFileDataBlockEncoder encoder;
   private final boolean cacheCompressedData;
 
   private static final int DATA_BLOCK_SIZE = 2048;
@@ -134,42 +132,16 @@ public class TestCacheOnWrite {
         conf.setBoolean(cowType.confKey, cowType == this);
       }
     }
-
-  }
-
-  private static final DataBlockEncoding ENCODING_ALGO =
-      DataBlockEncoding.PREFIX;
-
-  /** Provides fancy names for three combinations of two booleans */
-  private static enum BlockEncoderTestType {
-    NO_BLOCK_ENCODING_NOOP(true, false),
-    NO_BLOCK_ENCODING(false, false),
-    BLOCK_ENCODING_EVERYWHERE(false, true);
-
-    private final boolean noop;
-    private final boolean encode;
-
-    BlockEncoderTestType(boolean noop, boolean encode) {
-      this.encode = encode;
-      this.noop = noop;
-    }
-
-    public HFileDataBlockEncoder getEncoder() {
-      return noop ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(
-        encode ? ENCODING_ALGO : DataBlockEncoding.NONE);
-    }
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
+      boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
-    this.encoderType = encoderType;
-    this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData +
+        ", cacheCompressedData=" + cacheCompressedData +
         ", blockCache=" + blockCache.getClass().getSimpleName() + "]";
     LOG.info(testDescription);
   }
@@ -196,20 +168,17 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (BlockCache blockache : getBlockCaches()) {
+    List<Object[]> params = new ArrayList<Object[]>();
+    for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
-            for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
-                  blockache });
-            }
+          for (boolean cacheCompressedData : new boolean[] { false, true }) {
+            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
           }
         }
       }
     }
-    return cowTypes;
+    return params;
   }
 
   private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
@@ -273,7 +242,8 @@ public class TestCacheOnWrite {
     LOG.info("HFile information: " + reader);
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
       .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-      .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+      .withBlockSize(DATA_BLOCK_SIZE)
+      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
       .withIncludesTags(useTags).build();
     final boolean cacheBlocks = false;
     final boolean pread = false;
@@ -285,8 +255,7 @@ public class TestCacheOnWrite {
     EnumMap<BlockType, Integer> blockCountByType =
         new EnumMap<BlockType, Integer>(BlockType.class);
 
-    DataBlockEncoding encodingInCache =
-        encoderType.getEncoder().getDataBlockEncoding();
+    DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       long onDiskSize = -1;
       if (prevBlock != null) {
@@ -320,10 +289,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        if (block.getBlockType() == BlockType.ENCODED_DATA) {
-          assertEquals(block.getDataBlockEncodingId(), fromCache.getDataBlockEncodingId());
-          assertEquals(block.getDataBlockEncoding(), fromCache.getDataBlockEncoding());
-        }
+        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(
@@ -338,13 +304,11 @@ public class TestCacheOnWrite {
 
     LOG.info("Block count by type: " + blockCountByType);
     String countByType = blockCountByType.toString();
-    BlockType cachedDataBlockType =
-        encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
-      assertEquals("{" + cachedDataBlockType
+      assertEquals("{" + BlockType.DATA
           + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
@@ -379,7 +343,8 @@ public class TestCacheOnWrite {
         "test_cache_on_write");
     HFileContext meta = new HFileContextBuilder().withCompression(compress)
         .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
-        .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncoding(encoder.getDataBlockEncoding())
+        .withBlockSize(DATA_BLOCK_SIZE)
+        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
         .withIncludesTags(useTags).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
@@ -431,7 +396,7 @@ public class TestCacheOnWrite {
             .setCompressionType(compress)
             .setBloomFilterType(BLOOM_TYPE)
             .setMaxVersions(maxVersions)
-            .setDataBlockEncoding(encoder.getDataBlockEncoding())
+            .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
     );
     int rowIdx = 0;
     long ts = EnvironmentEdgeManager.currentTimeMillis();