Posted to commits@hbase.apache.org by la...@apache.org on 2016/11/01 19:50:58 UTC

[04/50] hbase git commit: HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)

HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ccc8e4a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ccc8e4a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ccc8e4a2

Branch: refs/heads/branch-1.0
Commit: ccc8e4a23862a2c44752e479cbf1f1bba1814511
Parents: 556741b
Author: ramkrishna <ra...@gmail.com>
Authored: Tue Jan 12 14:36:48 2016 +0530
Committer: ramkrishna <ra...@gmail.com>
Committed: Tue Jan 12 14:40:27 2016 +0530

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  6 +++
 .../mapreduce/TestLoadIncrementalHFiles.java    | 46 ++++++++++++++++++++
 .../apache/hadoop/hbase/util/HFileTestUtil.java | 25 +++++++++--
 3 files changed, 73 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/ccc8e4a2/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 23b927b..093e30b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -85,6 +85,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -859,6 +860,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }
 
   private static boolean shouldCopyHFileMetaKey(byte[] key) {
+    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085
+    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {
+      return false;
+    }
+
     return !HFile.isReservedFileInfoKey(key);
   }
 

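Some context on the one-line guard above: splitStoreFile copies the source HFile's file-info entries into each half file, but the halves themselves are rewritten with the column family's configured encoding. Copying the source's DATA_BLOCK_ENCODING entry verbatim could therefore leave a half file whose meta advertises one encoding while its blocks use another, which is what surfaced as the IllegalStateException on scan. Below is a self-contained toy model of the filtered copy, not HBase code; every name in it is illustrative except the DATA_BLOCK_ENCODING string, which matches the value of the real HFileDataBlockEncoder.DATA_BLOCK_ENCODING constant.

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // Toy model of the patched meta copy; not HBase code.
    public class SplitMetaCopySketch {

      // Same string value as HFileDataBlockEncoder.DATA_BLOCK_ENCODING.
      static final byte[] DATA_BLOCK_ENCODING =
          "DATA_BLOCK_ENCODING".getBytes(StandardCharsets.UTF_8);

      // Mirrors the patched shouldCopyHFileMetaKey: the encoding entry is
      // regenerated by the half-file writer, so it must not be copied over.
      static boolean shouldCopyMetaKey(byte[] key) {
        return !Arrays.equals(key, DATA_BLOCK_ENCODING);
      }

      public static void main(String[] args) {
        Map<String, String> sourceMeta = new LinkedHashMap<>();
        sourceMeta.put("DATA_BLOCK_ENCODING", "DIFF");   // bulkloaded file's encoding
        sourceMeta.put("MAJOR_COMPACTION_KEY", "false"); // unrelated meta still copied

        Map<String, String> halfMeta = new LinkedHashMap<>();
        for (Map.Entry<String, String> e : sourceMeta.entrySet()) {
          if (shouldCopyMetaKey(e.getKey().getBytes(StandardCharsets.UTF_8))) {
            halfMeta.put(e.getKey(), e.getValue());
          }
        }

        // The half file's meta no longer claims DIFF; its encoding entry is
        // written by the half-file writer from the column family descriptor.
        System.out.println(halfMeta); // prints {MAJOR_COMPACTION_KEY=false}
      }
    }
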
http://git-wip-us.apache.org/repos/asf/hbase/blob/ccc8e4a2/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 96fc9bb..ed75e5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -471,6 +472,51 @@ public class TestLoadIncrementalHFiles {
     assertEquals(1000, rowCount);
   }
 
+  @Test
+  public void testSplitStoreFileWithNoneToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithNoneToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
+  }
+
+  private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
+      DataBlockEncoding cfEncoding) throws IOException {
+    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
+    FileSystem fs = util.getTestFileSystem();
+    Path testIn = new Path(dir, "testhfile");
+    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+    familyDesc.setDataBlockEncoding(cfEncoding);
+    HFileTestUtil.createHFileWithDataBlockEncoding(
+        util.getConfiguration(), fs, testIn, bulkloadEncoding,
+        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+    Path bottomOut = new Path(dir, "bottom.out");
+    Path topOut = new Path(dir, "top.out");
+
+    LoadIncrementalHFiles.splitStoreFile(
+        util.getConfiguration(), testIn,
+        familyDesc, Bytes.toBytes("ggg"),
+        bottomOut,
+        topOut);
+
+    int rowCount = verifyHFile(bottomOut);
+    rowCount += verifyHFile(topOut);
+    assertEquals(1000, rowCount);
+  }
+
   private int verifyHFile(Path p) throws IOException {
     Configuration conf = util.getConfiguration();
     HFile.Reader reader = HFile.createReader(

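The four tests above cover the full encoding matrix. The mismatch cases (DIFF to NONE, NONE to DIFF) are the ones that regressed before this patch; when the bulkload encoding and the column family encoding agree, the copied meta entry happens to be correct anyway. The half files get their real encoding from the familyDesc argument. A rough sketch of the writer-side setup, where HFileContextBuilder is the real HBase 1.x API but the surrounding variables (blocksize, familyDescriptor) are assumed from the scope of copyHFileHalf:

    // Rough sketch: each half file's write context comes from the column
    // family descriptor, so its blocks are encoded with cfEncoding no
    // matter how the bulkloaded source file was encoded.
    HFileContext halfContext = new HFileContextBuilder()
        .withBlockSize(blocksize)
        .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
        .build();
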
http://git-wip-us.apache.org/repos/asf/hbase/blob/ccc8e4a2/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index c2c938f..028937c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -57,7 +58,21 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey,
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, false);
+  }
+
+  /**
+   * Create an HFile with the given number of rows between a given
+   * start key and end key @ family:qualifier.  The value will be the key value.
+   * The file will be written using the given data block encoding algorithm.
+   */
+  public static void createHFileWithDataBlockEncoding(
+      Configuration configuration,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
+      byte[] family, byte[] qualifier,
+      byte[] startKey, byte[] endKey, int numRows) throws IOException {
+    createHFile(configuration, fs, path, encoding, family, qualifier, startKey, endKey,
         numRows, false);
   }
 
@@ -71,7 +86,8 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey, numRows, true);
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, true);
   }
 
   /**
@@ -82,11 +98,12 @@ public class HFileTestUtil {
    */
   public static void createHFile(
       Configuration configuration,
-      FileSystem fs, Path path,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows, boolean withTag) throws IOException {
     HFileContext meta = new HFileContextBuilder()
         .withIncludesTags(withTag)
+        .withDataBlockEncoding(encoding)
         .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)
@@ -150,4 +167,4 @@ public class HFileTestUtil {
       }
     }
   }
-}
\ No newline at end of file
+}
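
For anyone adding similar coverage, the new helper is invoked the same way as in testSplitStoreFileWithDifferentEncoding above. A minimal usage example, assuming conf, fs, dir, FAMILY and QUALIFIER come from the surrounding test harness:

    // Write a 1000-row HFile with DIFF-encoded data blocks, spanning row
    // keys "aaa" through "zzz" under FAMILY:QUALIFIER.
    HFileTestUtil.createHFileWithDataBlockEncoding(
        conf, fs, new Path(dir, "testhfile"), DataBlockEncoding.DIFF,
        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);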