Posted to commits@hbase.apache.org by sy...@apache.org on 2016/01/12 21:17:04 UTC
[3/4] hbase git commit: HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)

HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/840f5ea6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/840f5ea6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/840f5ea6
Branch: refs/heads/hbase-12439
Commit: 840f5ea686e50c4b8d81b3fe1df2f3a7bd807636
Parents: 83c506d
Author: ramkrishna <ra...@gmail.com>
Authored: Tue Jan 12 14:36:48 2016 +0530
Committer: ramkrishna <ra...@gmail.com>
Committed: Tue Jan 12 14:36:48 2016 +0530
----------------------------------------------------------------------
.../hbase/mapreduce/LoadIncrementalHFiles.java | 6 +++
.../mapreduce/TestLoadIncrementalHFiles.java | 46 ++++++++++++++++++++
.../apache/hadoop/hbase/util/HFileTestUtil.java | 25 +++++++++--
3 files changed, 73 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/840f5ea6/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 172dfe3..5d75d56 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -929,6 +930,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
}
private static boolean shouldCopyHFileMetaKey(byte[] key) {
+ // Skip the data block encoding key so the copied hfile meta stays consistent with how the data blocks were actually encoded; see HBASE-15085
+ if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {
+ return false;
+ }
+
return !HFile.isReservedFileInfoKey(key);
}
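
Why this fix works: when LoadIncrementalHFiles splits a bulkload HFile at a region boundary, the two half files are rewritten with the target column family's data block encoding, and the source file's remaining meta entries are copied into them. Copying the source's DATA_BLOCK_ENCODING entry as well could leave the meta claiming one encoding while the new blocks were written with another, which is what later surfaced as the IllegalStateException in the subject. A minimal sketch of the filter's behavior (illustrative only, not part of the patch; shouldCopyHFileMetaKey is private, so assume this runs inside LoadIncrementalHFiles, and the sample keys are just examples):

    // Illustrative driver for the meta-key filter; not part of the patch.
    byte[][] sampleKeys = {
        Bytes.toBytes("hfile.LASTKEY"),             // "hfile."-reserved: never copied
        HFileDataBlockEncoder.DATA_BLOCK_ENCODING,  // not "hfile."-prefixed, now skipped by this fix
        Bytes.toBytes("MAJOR_COMPACTION_KEY")       // ordinary meta key: still copied
    };
    for (byte[] key : sampleKeys) {
      System.out.println(Bytes.toString(key) + " -> copy=" + shouldCopyHFileMetaKey(key));
    }
    // Expected: hfile.LASTKEY -> copy=false, DATA_BLOCK_ENCODING -> copy=false,
    // MAJOR_COMPACTION_KEY -> copy=true

Note that DATA_BLOCK_ENCODING is not "hfile."-prefixed, which is why the existing isReservedFileInfoKey() check alone did not catch it.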
http://git-wip-us.apache.org/repos/asf/hbase/blob/840f5ea6/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 8c64ff8..6dc8566 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -482,6 +483,51 @@ public class TestLoadIncrementalHFiles {
assertEquals(1000, rowCount);
}
+ @Test
+ public void testSplitStoreFileWithNoneToNone() throws IOException {
+ testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
+ }
+
+ @Test
+ public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
+ testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
+ }
+
+ @Test
+ public void testSplitStoreFileWithEncodedToNone() throws IOException {
+ testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
+ }
+
+ @Test
+ public void testSplitStoreFileWithNoneToEncoded() throws IOException {
+ testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
+ }
+
+ private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
+ DataBlockEncoding cfEncoding) throws IOException {
+ Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
+ FileSystem fs = util.getTestFileSystem();
+ Path testIn = new Path(dir, "testhfile");
+ HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+ familyDesc.setDataBlockEncoding(cfEncoding);
+ HFileTestUtil.createHFileWithDataBlockEncoding(
+ util.getConfiguration(), fs, testIn, bulkloadEncoding,
+ FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+ Path bottomOut = new Path(dir, "bottom.out");
+ Path topOut = new Path(dir, "top.out");
+
+ LoadIncrementalHFiles.splitStoreFile(
+ util.getConfiguration(), testIn,
+ familyDesc, Bytes.toBytes("ggg"),
+ bottomOut,
+ topOut);
+
+ int rowCount = verifyHFile(bottomOut);
+ rowCount += verifyHFile(topOut);
+ assertEquals(1000, rowCount);
+ }
+
private int verifyHFile(Path p) throws IOException {
Configuration conf = util.getConfiguration();
HFile.Reader reader = HFile.createReader(
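
The hunk above is truncated inside verifyHFile(Path), which the new tests reuse to count the rows in each split half. For reference, an end-to-end row count over an HFile looks roughly like the following (a sketch against the HBase reader API of this era, not the file's actual method body):

    // Sketch: count the rows in an HFile by scanning it end to end.
    private int countRows(Configuration conf, FileSystem fs, Path p) throws IOException {
      HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
      try {
        reader.loadFileInfo();                  // loads file meta, including encoding info
        HFileScanner scanner = reader.getScanner(false, false);
        int count = 0;
        if (scanner.seekTo()) {                 // false for an empty file
          do {
            count++;
          } while (scanner.next());
        }
        return count;
      } finally {
        reader.close();
      }
    }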
http://git-wip-us.apache.org/repos/asf/hbase/blob/840f5ea6/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index 964d6ed..fbd79c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -59,7 +60,21 @@ public class HFileTestUtil {
FileSystem fs, Path path,
byte[] family, byte[] qualifier,
byte[] startKey, byte[] endKey, int numRows) throws IOException {
- createHFile(configuration, fs, path, family, qualifier, startKey, endKey,
+ createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+ startKey, endKey, numRows, false);
+ }
+
+ /**
+ * Create an HFile with the given number of rows between a given
+ * start key and end key @ family:qualifier. The value will be the key value.
+ * The file will be written with the given data block encoding algorithm.
+ */
+ public static void createHFileWithDataBlockEncoding(
+ Configuration configuration,
+ FileSystem fs, Path path, DataBlockEncoding encoding,
+ byte[] family, byte[] qualifier,
+ byte[] startKey, byte[] endKey, int numRows) throws IOException {
+ createHFile(configuration, fs, path, encoding, family, qualifier, startKey, endKey,
numRows, false);
}
@@ -73,7 +88,8 @@ public class HFileTestUtil {
FileSystem fs, Path path,
byte[] family, byte[] qualifier,
byte[] startKey, byte[] endKey, int numRows) throws IOException {
- createHFile(configuration, fs, path, family, qualifier, startKey, endKey, numRows, true);
+ createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+ startKey, endKey, numRows, true);
}
/**
@@ -84,11 +100,12 @@ public class HFileTestUtil {
*/
public static void createHFile(
Configuration configuration,
- FileSystem fs, Path path,
+ FileSystem fs, Path path, DataBlockEncoding encoding,
byte[] family, byte[] qualifier,
byte[] startKey, byte[] endKey, int numRows, boolean withTag) throws IOException {
HFileContext meta = new HFileContextBuilder()
.withIncludesTags(withTag)
+ .withDataBlockEncoding(encoding)
.build();
HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
.withPath(fs, path)
@@ -141,4 +158,4 @@ public class HFileTestUtil {
}
}
}
-}
\ No newline at end of file
+}
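
Usage of the new helper mirrors createHFile() with one extra encoding argument. For instance, the DIFF-to-NONE test scenario above writes its input file like this (variable names illustrative, following the test):

    // Sketch: write a 1000-row test HFile whose data blocks are DIFF-encoded.
    HFileTestUtil.createHFileWithDataBlockEncoding(
        util.getConfiguration(), util.getTestFileSystem(),
        new Path(dir, "diff-encoded-input"), DataBlockEncoding.DIFF,
        Bytes.toBytes("f"), Bytes.toBytes("q"),
        Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);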