You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by sy...@apache.org on 2017/04/26 22:52:15 UTC
[08/40] hbase git commit: HBASE-17914 Create a new reader instead of
cloning a new StoreFile when compacting
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index f2d00b3..b839fc3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -753,9 +753,6 @@ public class TestStripeCompactionPolicy {
when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
anyBoolean())).thenReturn(mock(StoreFileScanner.class));
when(sf.getReader()).thenReturn(r);
- when(sf.createReader(anyBoolean())).thenReturn(r);
- when(sf.createReader()).thenReturn(r);
- when(sf.cloneForReader()).thenReturn(sf);
return sf;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index 54f310d..17ab004 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -153,7 +153,7 @@ public class TestHBaseFsckEncryption {
private byte[] extractHFileKey(Path path) throws Exception {
HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
- new CacheConfig(conf), conf);
+ new CacheConfig(conf), true, conf);
try {
reader.loadFileInfo();
Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/BulkLoadSuite.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/BulkLoadSuite.scala b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/BulkLoadSuite.scala
index 795ce6d..d2b707e 100644
--- a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/BulkLoadSuite.scala
+++ b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/BulkLoadSuite.scala
@@ -390,7 +390,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val f1FileList = fs.listStatus(new Path(stagingFolder.getPath +"/f1"))
for ( i <- 0 until f1FileList.length) {
val reader = HFile.createReader(fs, f1FileList(i).getPath,
- new CacheConfig(config), config)
+ new CacheConfig(config), true, config)
assert(reader.getCompressionAlgorithm.getName.equals("gz"))
assert(reader.getDataBlockEncoding.name().equals("PREFIX"))
}
@@ -400,7 +400,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val f2FileList = fs.listStatus(new Path(stagingFolder.getPath +"/f2"))
for ( i <- 0 until f2FileList.length) {
val reader = HFile.createReader(fs, f2FileList(i).getPath,
- new CacheConfig(config), config)
+ new CacheConfig(config), true, config)
assert(reader.getCompressionAlgorithm.getName.equals("none"))
assert(reader.getDataBlockEncoding.name().equals("NONE"))
}
@@ -869,7 +869,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val f1FileList = fs.listStatus(new Path(stagingFolder.getPath +"/f1"))
for ( i <- 0 until f1FileList.length) {
val reader = HFile.createReader(fs, f1FileList(i).getPath,
- new CacheConfig(config), config)
+ new CacheConfig(config), true, config)
assert(reader.getCompressionAlgorithm.getName.equals("gz"))
assert(reader.getDataBlockEncoding.name().equals("PREFIX"))
}
@@ -879,7 +879,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val f2FileList = fs.listStatus(new Path(stagingFolder.getPath +"/f2"))
for ( i <- 0 until f2FileList.length) {
val reader = HFile.createReader(fs, f2FileList(i).getPath,
- new CacheConfig(config), config)
+ new CacheConfig(config), true, config)
assert(reader.getCompressionAlgorithm.getName.equals("none"))
assert(reader.getDataBlockEncoding.name().equals("NONE"))
}