Posted to commits@hbase.apache.org by zh...@apache.org on 2017/04/19 01:48:24 UTC
[2/3] hbase git commit: HBASE-17914 Create a new reader instead of cloning a new StoreFile when compaction
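
In outline: before this patch a compaction cloned each StoreFile (cloneForReader()) and called createReader() on every clone so that it worked against private readers, closing the clones when done; after it, the reader's lifecycle lives on the StoreFile itself and call sites switch from createReader() to initReader() plus getReader(). A minimal sketch of the new pattern as it appears throughout the hunks below (setup of fs, path, conf and cacheConf assumed; the trailing 'true' is the boolean this patch threads through the StoreFile constructor, read here as a primary-replica hint, which is an inference from context rather than from the patch text):

    StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
    sf.initReader();                          // replaces the old sf.createReader()
    StoreFileReader reader = sf.getReader();  // the reader is owned by the StoreFile
    // ... use 'reader' (e.g. reader.getHFileReader(), scanners) ...
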
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index d72529a..0ba500a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -17,11 +17,12 @@
*/
package org.apache.hadoop.hbase.regionserver.compactions;
+import com.google.common.io.Closeables;
+
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -59,8 +60,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
-import com.google.common.io.Closeables;
-
/**
* A compactor is a compaction algorithm associated a given policy. Base class also contains
* reusable parts for implementing compactors (what is common and what isn't is evolving).
@@ -216,15 +215,9 @@ public abstract class Compactor<T extends CellSink> {
* @param filesToCompact Files.
* @return Scanners.
*/
- protected List<StoreFileScanner> createFileScanners(
- final Collection<StoreFile> filesToCompact,
- long smallestReadPoint,
- boolean useDropBehind) throws IOException {
- return StoreFileScanner.getScannersForStoreFiles(filesToCompact,
- /* cache blocks = */ false,
- /* use pread = */ false,
- /* is compaction */ true,
- /* use Drop Behind */ useDropBehind,
+ protected List<StoreFileScanner> createFileScanners(Collection<StoreFile> filesToCompact,
+ long smallestReadPoint, boolean useDropBehind) throws IOException {
+ return StoreFileScanner.getScannersForCompaction(filesToCompact, useDropBehind,
smallestReadPoint);
}
@@ -281,8 +274,6 @@ public abstract class Compactor<T extends CellSink> {
// Find the smallest read point across all the Scanners.
long smallestReadPoint = getSmallestReadPoint();
- List<StoreFileScanner> scanners;
- Collection<StoreFile> readersToClose;
T writer = null;
boolean dropCache;
if (request.isMajor() || request.isAllFiles()) {
@@ -291,22 +282,8 @@ public abstract class Compactor<T extends CellSink> {
dropCache = this.dropCacheMinor;
}
- if (this.conf.getBoolean("hbase.regionserver.compaction.private.readers", true)) {
- // clone all StoreFiles, so we'll do the compaction on a independent copy of StoreFiles,
- // HFiles, and their readers
- readersToClose = new ArrayList<>(request.getFiles().size());
- for (StoreFile f : request.getFiles()) {
- StoreFile clonedStoreFile = f.cloneForReader();
- // create the reader after the store file is cloned in case
- // the sequence id is used for sorting in scanners
- clonedStoreFile.createReader();
- readersToClose.add(clonedStoreFile);
- }
- scanners = createFileScanners(readersToClose, smallestReadPoint, dropCache);
- } else {
- readersToClose = Collections.emptyList();
- scanners = createFileScanners(request.getFiles(), smallestReadPoint, dropCache);
- }
+ List<StoreFileScanner> scanners =
+ createFileScanners(request.getFiles(), smallestReadPoint, dropCache);
InternalScanner scanner = null;
boolean finished = false;
try {
@@ -336,13 +313,6 @@ public abstract class Compactor<T extends CellSink> {
}
} finally {
Closeables.close(scanner, true);
- for (StoreFile f : readersToClose) {
- try {
- f.closeReader(true);
- } catch (IOException e) {
- LOG.warn("Exception closing " + f, e);
- }
- }
if (!finished && writer != null) {
abortWriter(writer);
}
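
The hunks above are the core of HBASE-17914: the "hbase.regionserver.compaction.private.readers" branch, which cloned every StoreFile in the request, opened a private reader per clone, and closed those clones again in the finally block, is removed, and createFileScanners() now delegates to StoreFileScanner.getScannersForCompaction(filesToCompact, useDropBehind, smallestReadPoint). A heavily abridged sketch of the resulting flow (an approximation for orientation, not the full method):

    // Post-patch shape of Compactor.compact(), heavily abridged.
    List<StoreFileScanner> scanners =
        createFileScanners(request.getFiles(), smallestReadPoint, dropCache);
    InternalScanner scanner = null;
    boolean finished = false;
    try {
      // ... build the InternalScanner over 'scanners', write cells to 'writer' ...
    } finally {
      Closeables.close(scanner, true);  // no per-clone reader cleanup any more
      if (!finished && writer != null) {
        abortWriter(writer);
      }
    }
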
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index ace45ec..7b745ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -133,7 +133,7 @@ public class CompressionTest {
writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
writer.close();
Cell cc = null;
- HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
try {
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
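
This one-line change recurs through the rest of the patch: HFile.createReader() grows a boolean parameter ahead of the Configuration, and every call site here passes true. A minimal sketch of the updated open-and-read idiom (the file at 'path' is assumed to exist; reading the flag as a primary-replica-reader hint is an inference from the parallel StoreFile constructor change, not something these hunks state):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;

    static HFile.Reader openHFile(FileSystem fs, Path path, Configuration conf)
        throws IOException {
      // New 4th argument: the boolean this patch adds ('true' at every call site).
      HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
      reader.loadFileInfo();  // most call sites in this patch load file info right away
      return reader;
    }
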
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 4eab62b..dca02e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -849,8 +849,8 @@ public class HBaseFsck extends Configured implements Closeable {
FileStatus[] storeFiles = fs.listStatus(file.getPath());
// For all the stores in this column family.
for (FileStatus storeFile : storeFiles) {
- HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(), new CacheConfig(
- getConf()), getConf());
+ HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),
+ new CacheConfig(getConf()), true, getConf());
if ((reader.getFirstKey() != null)
&& ((storeFirstKey == null) || (comparator.compare(storeFirstKey,
((KeyValue.KeyOnlyKeyValue) reader.getFirstKey()).getKey()) > 0))) {
@@ -954,7 +954,7 @@ public class HBaseFsck extends Configured implements Closeable {
HFile.Reader hf = null;
try {
CacheConfig cacheConf = new CacheConfig(getConf());
- hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
+ hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());
hf.loadFileInfo();
Cell startKv = hf.getFirstKey();
start = CellUtil.cloneRow(startKv);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
index 82200bd..e46e43b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
@@ -98,7 +98,7 @@ public class HFileCorruptionChecker {
protected void checkHFile(Path p) throws IOException {
HFile.Reader r = null;
try {
- r = HFile.createReader(fs, p, cacheConf, conf);
+ r = HFile.createReader(fs, p, cacheConf, true, conf);
} catch (CorruptHFileException che) {
LOG.warn("Found corrupt HFile " + p, che);
corrupted.add(p);
@@ -230,7 +230,7 @@ public class HFileCorruptionChecker {
protected void checkMobFile(Path p) throws IOException {
HFile.Reader r = null;
try {
- r = HFile.createReader(fs, p, cacheConf, conf);
+ r = HFile.createReader(fs, p, cacheConf, true, conf);
} catch (CorruptHFileException che) {
LOG.warn("Found corrupt mob file " + p, che);
corruptedMobFiles.add(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
index 562630a..37ca56b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
@@ -403,7 +403,7 @@ public class HFilePerformanceEvaluation {
@Override
void setUp() throws Exception {
- reader = HFile.createReader(this.fs, this.mf, new CacheConfig(this.conf), this.conf);
+ reader = HFile.createReader(this.fs, this.mf, new CacheConfig(this.conf), true, this.conf);
this.reader.loadFileInfo();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index b1a0d3c..7668aa2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -5174,6 +5174,7 @@ public class TestFromClientSide {
assertEquals(2, store.getStorefilesCount());
store.triggerMajorCompaction();
region.compact(true);
+ store.closeAndArchiveCompactedFiles();
waitForStoreFileCount(store, 1, 10000); // wait 10 seconds max
assertEquals(1, store.getStorefilesCount());
expectedBlockCount -= 2; // evicted two blocks, cached none
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index 6a0921f..0fd3cdb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +50,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category({IOTests.class, SmallTests.class})
+@Category({ IOTests.class, SmallTests.class })
public class TestHalfStoreFileReader {
private static HBaseTestingUtility TEST_UTIL;
@@ -64,19 +65,14 @@ public class TestHalfStoreFileReader {
}
/**
- * Test the scanner and reseek of a half hfile scanner. The scanner API
- * demands that seekTo and reseekTo() only return < 0 if the key lies
- * before the start of the file (with no position on the scanner). Returning
- * 0 if perfect match (rare), and return > 1 if we got an imperfect match.
- *
- * The latter case being the most common, we should generally be returning 1,
- * and if we do, there may or may not be a 'next' in the scanner/file.
- *
- * A bug in the half file scanner was returning -1 at the end of the bottom
- * half, and that was causing the infrastructure above to go null causing NPEs
- * and other problems. This test reproduces that failure, and also tests
- * both the bottom and top of the file while we are at it.
- *
+ * Test the scanner and reseek of a half hfile scanner. The scanner API demands that seekTo and
+ * reseekTo() only return < 0 if the key lies before the start of the file (with no position on
+ * the scanner). Returning 0 if perfect match (rare), and return > 1 if we got an imperfect match.
+ * The latter case being the most common, we should generally be returning 1, and if we do, there
+ * may or may not be a 'next' in the scanner/file. A bug in the half file scanner was returning -1
+ * at the end of the bottom half, and that was causing the infrastructure above to go null causing
+ * NPEs and other problems. This test reproduces that failure, and also tests both the bottom and
+ * top of the file while we are at it.
* @throws IOException
*/
@Test
@@ -88,10 +84,8 @@ public class TestHalfStoreFileReader {
FileSystem fs = FileSystem.get(conf);
CacheConfig cacheConf = new CacheConfig(conf);
HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
- HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
- .withPath(fs, p)
- .withFileContext(meta)
- .create();
+ HFile.Writer w =
+ HFile.getWriterFactory(conf, cacheConf).withPath(fs, p).withFileContext(meta).create();
// write some things.
List<KeyValue> items = genSomeKeys();
@@ -100,12 +94,12 @@ public class TestHalfStoreFileReader {
}
w.close();
- HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
+ HFile.Reader r = HFile.createReader(fs, p, cacheConf, true, conf);
r.loadFileInfo();
Cell midKV = r.midkey();
byte[] midkey = CellUtil.cloneRow(midKV);
- //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
+ // System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
Reference bottom = new Reference(midkey, Reference.Range.bottom);
doTestOfScanAndReseek(p, fs, bottom, cacheConf);
@@ -116,11 +110,10 @@ public class TestHalfStoreFileReader {
r.close();
}
- private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom,
- CacheConfig cacheConf)
+ private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf)
throws IOException {
- final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p,
- cacheConf, bottom, TEST_UTIL.getConfiguration());
+ final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p, cacheConf, bottom, true,
+ new AtomicInteger(0), true, TEST_UTIL.getConfiguration());
halfreader.loadFileInfo();
final HFileScanner scanner = halfreader.getScanner(false, false);
@@ -128,110 +121,103 @@ public class TestHalfStoreFileReader {
Cell curr;
do {
curr = scanner.getCell();
- KeyValue reseekKv =
- getLastOnCol(curr);
+ KeyValue reseekKv = getLastOnCol(curr);
int ret = scanner.reseekTo(reseekKv);
assertTrue("reseek to returned: " + ret, ret > 0);
- //System.out.println(curr + ": " + ret);
+ // System.out.println(curr + ": " + ret);
} while (scanner.next());
int ret = scanner.reseekTo(getLastOnCol(curr));
- //System.out.println("Last reseek: " + ret);
- assertTrue( ret > 0 );
+ // System.out.println("Last reseek: " + ret);
+ assertTrue(ret > 0);
halfreader.close(true);
}
-
// Tests the scanner on an HFile that is backed by HalfStoreFiles
@Test
public void testHalfScanner() throws IOException {
- String root_dir = TEST_UTIL.getDataTestDir().toString();
- Path p = new Path(root_dir, "test");
- Configuration conf = TEST_UTIL.getConfiguration();
- FileSystem fs = FileSystem.get(conf);
- CacheConfig cacheConf = new CacheConfig(conf);
- HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
- HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
- .withPath(fs, p)
- .withFileContext(meta)
- .create();
-
- // write some things.
- List<KeyValue> items = genSomeKeys();
- for (KeyValue kv : items) {
- w.append(kv);
- }
- w.close();
+ String root_dir = TEST_UTIL.getDataTestDir().toString();
+ Path p = new Path(root_dir, "test");
+ Configuration conf = TEST_UTIL.getConfiguration();
+ FileSystem fs = FileSystem.get(conf);
+ CacheConfig cacheConf = new CacheConfig(conf);
+ HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
+ HFile.Writer w =
+ HFile.getWriterFactory(conf, cacheConf).withPath(fs, p).withFileContext(meta).create();
+ // write some things.
+ List<KeyValue> items = genSomeKeys();
+ for (KeyValue kv : items) {
+ w.append(kv);
+ }
+ w.close();
- HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
- r.loadFileInfo();
- Cell midKV = r.midkey();
- byte[] midkey = CellUtil.cloneRow(midKV);
+ HFile.Reader r = HFile.createReader(fs, p, cacheConf, true, conf);
+ r.loadFileInfo();
+ Cell midKV = r.midkey();
+ byte[] midkey = CellUtil.cloneRow(midKV);
- Reference bottom = new Reference(midkey, Reference.Range.bottom);
- Reference top = new Reference(midkey, Reference.Range.top);
+ Reference bottom = new Reference(midkey, Reference.Range.bottom);
+ Reference top = new Reference(midkey, Reference.Range.top);
- // Ugly code to get the item before the midkey
- KeyValue beforeMidKey = null;
- for (KeyValue item : items) {
- if (CellComparator.COMPARATOR.compare(item, midKV) >= 0) {
- break;
- }
- beforeMidKey = item;
+ // Ugly code to get the item before the midkey
+ KeyValue beforeMidKey = null;
+ for (KeyValue item : items) {
+ if (CellComparator.COMPARATOR.compare(item, midKV) >= 0) {
+ break;
}
- System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
- System.out.println("beforeMidKey: " + beforeMidKey);
-
+ beforeMidKey = item;
+ }
+ System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
+ System.out.println("beforeMidKey: " + beforeMidKey);
- // Seek on the splitKey, should be in top, not in bottom
- Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
- assertEquals(beforeMidKey, foundKeyValue);
+ // Seek on the splitKey, should be in top, not in bottom
+ Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
+ assertEquals(beforeMidKey, foundKeyValue);
- // Seek tot the last thing should be the penultimate on the top, the one before the midkey on the bottom.
- foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(items.size() - 1), cacheConf);
- assertEquals(items.get(items.size() - 2), foundKeyValue);
+ // Seek tot the last thing should be the penultimate on the top, the one before the midkey on
+ // the bottom.
+ foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(items.size() - 1), cacheConf);
+ assertEquals(items.get(items.size() - 2), foundKeyValue);
- foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(items.size() - 1), cacheConf);
- assertEquals(beforeMidKey, foundKeyValue);
+ foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(items.size() - 1), cacheConf);
+ assertEquals(beforeMidKey, foundKeyValue);
- // Try and seek before something that is in the bottom.
- foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(0), cacheConf);
- assertNull(foundKeyValue);
+ // Try and seek before something that is in the bottom.
+ foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(0), cacheConf);
+ assertNull(foundKeyValue);
- // Try and seek before the first thing.
- foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(0), cacheConf);
- assertNull(foundKeyValue);
+ // Try and seek before the first thing.
+ foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(0), cacheConf);
+ assertNull(foundKeyValue);
- // Try and seek before the second thing in the top and bottom.
- foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(1), cacheConf);
- assertNull(foundKeyValue);
+ // Try and seek before the second thing in the top and bottom.
+ foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(1), cacheConf);
+ assertNull(foundKeyValue);
- foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(1), cacheConf);
- assertEquals(items.get(0), foundKeyValue);
+ foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(1), cacheConf);
+ assertEquals(items.get(0), foundKeyValue);
- // Try to seek before the splitKey in the top file
- foundKeyValue = doTestOfSeekBefore(p, fs, top, midKV, cacheConf);
- assertNull(foundKeyValue);
- }
+ // Try to seek before the splitKey in the top file
+ foundKeyValue = doTestOfSeekBefore(p, fs, top, midKV, cacheConf);
+ assertNull(foundKeyValue);
+ }
private Cell doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, Cell seekBefore,
- CacheConfig cacheConfig)
- throws IOException {
- final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p,
- cacheConfig, bottom, TEST_UTIL.getConfiguration());
- halfreader.loadFileInfo();
- final HFileScanner scanner = halfreader.getScanner(false, false);
- scanner.seekBefore(seekBefore);
- return scanner.getCell();
+ CacheConfig cacheConfig) throws IOException {
+ final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p, cacheConfig, bottom, true,
+ new AtomicInteger(0), true, TEST_UTIL.getConfiguration());
+ halfreader.loadFileInfo();
+ final HFileScanner scanner = halfreader.getScanner(false, false);
+ scanner.seekBefore(seekBefore);
+ return scanner.getCell();
}
private KeyValue getLastOnCol(Cell curr) {
- return KeyValueUtil.createLastOnRow(
- curr.getRowArray(), curr.getRowOffset(), curr.getRowLength(),
- curr.getFamilyArray(), curr.getFamilyOffset(), curr.getFamilyLength(),
- curr.getQualifierArray(), curr.getQualifierOffset(), curr.getQualifierLength());
+ return KeyValueUtil.createLastOnRow(curr.getRowArray(), curr.getRowOffset(),
+ curr.getRowLength(), curr.getFamilyArray(), curr.getFamilyOffset(), curr.getFamilyLength(),
+ curr.getQualifierArray(), curr.getQualifierOffset(), curr.getQualifierLength());
}
static final int SIZE = 1000;
@@ -244,18 +230,10 @@ public class TestHalfStoreFileReader {
List<KeyValue> ret = new ArrayList<>(SIZE);
for (int i = 0; i < SIZE; i++) {
KeyValue kv =
- new KeyValue(
- _b(String.format("row_%04d", i)),
- _b("family"),
- _b("qualifier"),
- 1000, // timestamp
+ new KeyValue(_b(String.format("row_%04d", i)), _b("family"), _b("qualifier"), 1000, // timestamp
_b("value"));
ret.add(kv);
}
return ret;
}
-
-
-
}
-
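
The reflowed Javadoc above is the clearest statement of the scanner contract in this patch: seekTo()/reseekTo() return < 0 only when the key lies before the start of the file, 0 on an exact match (rare), and > 0 on an imperfect match, the common case, after which next() may or may not find another cell. A small sketch of a caller honoring that contract (same HFileScanner/Cell types the test uses):

    import java.io.IOException;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.io.hfile.HFileScanner;

    static void reseekExample(HFileScanner scanner, Cell key) throws IOException {
      int ret = scanner.reseekTo(key);
      if (ret < 0) {
        // key lies before the start of the file; the scanner holds no position
      } else if (ret == 0) {
        // exact match (rare)
      } else {
        // imperfect match (the common case): the scanner is positioned, though
        // not exactly on 'key'; there may or may not be a 'next' in the file
        Cell curr = scanner.getCell();
        boolean more = scanner.next();
      }
    }
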
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 3315b6f..49807a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -248,7 +248,7 @@ public class TestCacheOnWrite {
}
private void readStoreFile(boolean useTags) throws IOException {
- HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
LOG.info("HFile information: " + reader);
HFileContext meta = new HFileContextBuilder().withCompression(compress)
.withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
index 4db459a..d209430 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
@@ -161,7 +161,7 @@ public class TestHFile {
Writer w =
HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create();
w.close();
- Reader r = HFile.createReader(fs, f, cacheConf, conf);
+ Reader r = HFile.createReader(fs, f, cacheConf, true, conf);
r.loadFileInfo();
assertNull(r.getFirstKey());
assertNull(r.getLastKey());
@@ -178,7 +178,7 @@ public class TestHFile {
fsos.close();
try {
- Reader r = HFile.createReader(fs, f, cacheConf, conf);
+ Reader r = HFile.createReader(fs, f, cacheConf, true, conf);
} catch (CorruptHFileException che) {
// Expected failure
return;
@@ -218,7 +218,7 @@ public class TestHFile {
truncateFile(fs, w.getPath(), trunc);
try {
- Reader r = HFile.createReader(fs, trunc, cacheConf, conf);
+ Reader r = HFile.createReader(fs, trunc, cacheConf, true, conf);
} catch (CorruptHFileException che) {
// Expected failure
return;
@@ -453,7 +453,7 @@ public class TestHFile {
writer.append(kv);
writer.close();
fout.close();
- Reader reader = HFile.createReader(fs, mFile, cacheConf, conf);
+ Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
reader.loadFileInfo();
assertNull(reader.getMetaBlock("non-existant", false));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index 28930db..2052c1d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -565,7 +565,7 @@ public class TestHFileBlockIndex {
conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
// Read the HFile
- HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
boolean hasArrayIndexOutOfBoundsException = false;
try {
@@ -644,7 +644,7 @@ public class TestHFileBlockIndex {
}
// Read the HFile
- HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
assertEquals(expectedNumLevels,
reader.getTrailer().getNumDataIndexLevels());
@@ -774,7 +774,7 @@ public class TestHFileBlockIndex {
}
hfw.close();
- HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
// Scanner doesn't do Cells yet. Fix.
HFileScanner scanner = reader.getScanner(true, true);
for (int i = 0; i < keys.size(); ++i) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 3264558..40e9ab7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -178,7 +178,7 @@ public class TestHFileEncryption {
}
// read it back in and validate correct crypto metadata
- HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
try {
reader.loadFileInfo();
FixedFileTrailer trailer = reader.getTrailer();
@@ -230,7 +230,7 @@ public class TestHFileEncryption {
LOG.info("Reading with " + fileContext);
int i = 0;
HFileScanner scanner = null;
- HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
try {
reader.loadFileInfo();
FixedFileTrailer trailer = reader.getTrailer();
@@ -252,7 +252,7 @@ public class TestHFileEncryption {
// Test random seeks with pread
LOG.info("Random seeking with " + fileContext);
- reader = HFile.createReader(fs, path, cacheConf, conf);
+ reader = HFile.createReader(fs, path, cacheConf, true, conf);
try {
scanner = reader.getScanner(false, true);
assertTrue("Initial seekTo failed", scanner.seekTo());
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
index f1528c2..686024d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
@@ -77,7 +77,7 @@ public class TestHFileInlineToRootChunkConversion {
}
hfw.close();
- HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
// Scanner doesn't do Cells yet. Fix.
HFileScanner scanner = reader.getScanner(true, true);
for (int i = 0; i < keys.size(); ++i) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
index 4c3db03..dfa5ee8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
@@ -81,7 +81,7 @@ public class TestPrefetch {
private void readStoreFile(Path storeFilePath) throws Exception {
// Open the file
- HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
while (!reader.prefetchComplete()) {
// Sleep for a bit
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
index a9ecf7b..b3cd8ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
@@ -109,8 +109,8 @@ public class TestReseekTo {
writer.close();
fout.close();
- HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(),
- ncTFile, cacheConf, TEST_UTIL.getConfiguration());
+ HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), ncTFile, cacheConf,
+ true, TEST_UTIL.getConfiguration());
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
index d46af4a..f4309ea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
@@ -136,7 +136,7 @@ public class TestSeekBeforeWithInlineBlocks {
}
// Read the HFile
- HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
+ HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
// Sanity check the HFile index level
assertEquals(expectedNumLevels, reader.getTrailer().getNumDataIndexLevels());
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index 6531d2c..b268f0a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -147,7 +147,7 @@ public class TestSeekTo {
Path p = makeNewFile(tagUsage);
FileSystem fs = TEST_UTIL.getTestFileSystem();
Configuration conf = TEST_UTIL.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
assertFalse(scanner.seekBefore(toKV("a", tagUsage)));
@@ -206,7 +206,7 @@ public class TestSeekTo {
Path p = makeNewFile(tagUsage);
FileSystem fs = TEST_UTIL.getTestFileSystem();
Configuration conf = TEST_UTIL.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
assertFalse(scanner.seekBefore(toKV("a", tagUsage)));
@@ -300,7 +300,7 @@ public class TestSeekTo {
Path p = makeNewFile(tagUsage);
FileSystem fs = TEST_UTIL.getTestFileSystem();
Configuration conf = TEST_UTIL.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
assertEquals(2, reader.getDataBlockIndexReader().getRootBlockCount());
HFileScanner scanner = reader.getScanner(false, true);
@@ -338,7 +338,7 @@ public class TestSeekTo {
Path p = makeNewFile(tagUsage);
FileSystem fs = TEST_UTIL.getTestFileSystem();
Configuration conf = TEST_UTIL.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileBlockIndex.BlockIndexReader blockIndexReader =
reader.getDataBlockIndexReader();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 20fc992..274a76e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -365,8 +365,8 @@ public class TestHFileOutputFormat2 {
FileStatus[] file = fs.listStatus(sub1[0].getPath());
// open as HFile Reader and pull out TIMERANGE FileInfo.
- HFile.Reader rd = HFile.createReader(fs, file[0].getPath(),
- new CacheConfig(conf), conf);
+ HFile.Reader rd =
+ HFile.createReader(fs, file[0].getPath(), new CacheConfig(conf), true, conf);
Map<byte[],byte[]> finfo = rd.loadFileInfo();
byte[] range = finfo.get("TIMERANGE".getBytes());
assertNotNull(range);
@@ -458,8 +458,8 @@ public class TestHFileOutputFormat2 {
RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(dir, true);
while(iterator.hasNext()) {
LocatedFileStatus keyFileStatus = iterator.next();
- HFile.Reader reader = HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf),
- conf);
+ HFile.Reader reader =
+ HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), true, conf);
HFileScanner scanner = reader.getScanner(false, false, false);
scanner.seekTo();
Cell cell = scanner.getCell();
@@ -1043,7 +1043,7 @@ public class TestHFileOutputFormat2 {
// verify that the compression on this file matches the configured
// compression
Path dataFilePath = fs.listStatus(f.getPath())[0].getPath();
- Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf), conf);
+ Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf), true, conf);
Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
byte[] bloomFilter = fileInfo.get(StoreFile.BLOOM_FILTER_TYPE_KEY);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index b8d973b..8967ac7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -480,7 +480,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
*/
private static int getKVCountFromHfile(FileSystem fs, Path p) throws IOException {
Configuration conf = util.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, false);
scanner.seekTo();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index b7d5c6f..efcf91e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -556,7 +556,7 @@ public class TestImportTsv implements Configurable {
*/
private static int getKVCountFromHfile(FileSystem fs, Path p) throws IOException {
Configuration conf = util.getConfiguration();
- HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, false);
scanner.seekTo();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 7ae5afc..7f1723c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -620,8 +620,8 @@ public class TestLoadIncrementalHFiles {
private int verifyHFile(Path p) throws IOException {
Configuration conf = util.getConfiguration();
- HFile.Reader reader = HFile.createReader(
- p.getFileSystem(conf), p, new CacheConfig(conf), conf);
+ HFile.Reader reader =
+ HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, false);
scanner.seekTo();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
index 84a2ba7..6647ffe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
@@ -60,8 +60,8 @@ public class TestMobFile extends TestCase {
String caseName = getName();
MobTestUtil.writeStoreFile(writer, caseName);
- MobFile mobFile = new MobFile(new StoreFile(fs, writer.getPath(),
- conf, cacheConf, BloomType.NONE));
+ MobFile mobFile =
+ new MobFile(new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
byte[] family = Bytes.toBytes(caseName);
byte[] qualify = Bytes.toBytes(caseName);
@@ -112,8 +112,8 @@ public class TestMobFile extends TestCase {
.build();
MobTestUtil.writeStoreFile(writer, getName());
- MobFile mobFile = new MobFile(new StoreFile(fs, writer.getPath(),
- conf, cacheConf, BloomType.NONE));
+ MobFile mobFile =
+ new MobFile(new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
assertNotNull(mobFile.getScanner());
assertTrue(mobFile.getScanner() instanceof StoreFileScanner);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
index 83936aa..47a1c24 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
@@ -772,9 +772,7 @@ public class TestMobCompactor {
ResultScanner results = table.getScanner(scan);
int count = 0;
for (Result res : results) {
- for (Cell cell : res.listCells()) {
- count++;
- }
+ count += res.size();
}
results.close();
return count;
@@ -817,8 +815,9 @@ public class TestMobCompactor {
Path path = files[0].getPath();
CacheConfig cacheConf = new CacheConfig(conf);
StoreFile sf = new StoreFile(TEST_UTIL.getTestFileSystem(), path, conf, cacheConf,
- BloomType.NONE);
- HFile.Reader reader = sf.createReader().getHFileReader();
+ BloomType.NONE, true);
+ sf.initReader();
+ HFile.Reader reader = sf.getReader().getHFileReader();
byte[] encryptionKey = reader.getTrailer().getEncryptionKey();
Assert.assertTrue(null != encryptionKey);
Assert.assertTrue(reader.getFileContext().getEncryptionContext().getCipher().getName()
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
index 290e6f4..f65e224 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
@@ -515,10 +515,11 @@ public class TestPartitionedMobCompactor {
try {
for (CompactionDelPartition delPartition : request.getDelPartitions()) {
for (Path newDelPath : delPartition.listDelFiles()) {
- StoreFile sf = new StoreFile(fs, newDelPath, conf, this.cacheConfig, BloomType.NONE);
- // pre-create reader of a del file to avoid race condition when opening the reader in each
- // partition.
- sf.createReader();
+ StoreFile sf =
+ new StoreFile(fs, newDelPath, conf, this.cacheConfig, BloomType.NONE, true);
+ // pre-create reader of a del file to avoid race condition when opening the reader in
+ // each partition.
+ sf.initReader();
delPartition.addStoreFile(sf);
}
}
@@ -768,7 +769,6 @@ public class TestPartitionedMobCompactor {
* @param delPartitions all del partitions
*/
private void compareDelFiles(List<CompactionDelPartition> delPartitions) {
- int i = 0;
Map<Path, Path> delMap = new HashMap<>();
for (CompactionDelPartition delPartition : delPartitions) {
for (Path f : delPartition.listDelFiles()) {
@@ -850,12 +850,12 @@ public class TestPartitionedMobCompactor {
private int countDelCellsInDelFiles(List<Path> paths) throws IOException {
List<StoreFile> sfs = new ArrayList<>();
int size = 0;
- for(Path path : paths) {
- StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE);
+ for (Path path : paths) {
+ StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
sfs.add(sf);
}
- List scanners = StoreFileScanner.getScannersForStoreFiles(sfs, false, true,
- false, false, HConstants.LATEST_TIMESTAMP);
+ List<KeyValueScanner> scanners = new ArrayList<>(StoreFileScanner.getScannersForStoreFiles(sfs,
+ false, true, false, false, HConstants.LATEST_TIMESTAMP));
Scan scan = new Scan();
scan.setMaxVersions(hcd.getMaxVersions());
long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index a074a9a..e36d16f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -592,10 +592,9 @@ public class DataBlockEncodingTool {
Path path = new Path(hfilePath);
CacheConfig cacheConf = new CacheConfig(conf);
FileSystem fs = FileSystem.get(conf);
- StoreFile hsf = new StoreFile(fs, path, conf, cacheConf,
- BloomType.NONE);
-
- StoreFileReader reader = hsf.createReader();
+ StoreFile hsf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ StoreFileReader reader = hsf.getReader();
reader.loadFileInfo();
KeyValueScanner scanner = reader.getStoreFileScanner(true, true, false, 0, 0, false);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
index eb77c28..f47fc4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
@@ -60,9 +60,9 @@ public class EncodedSeekPerformanceTest {
// read all of the key values
StoreFile storeFile = new StoreFile(testingUtility.getTestFileSystem(),
- path, configuration, cacheConf, BloomType.NONE);
-
- StoreFileReader reader = storeFile.createReader();
+ path, configuration, cacheConf, BloomType.NONE, true);
+ storeFile.initReader();
+ StoreFileReader reader = storeFile.getReader();
StoreFileScanner scanner = reader.getStoreFileScanner(true, false, false, 0, 0, false);
Cell current;
@@ -90,11 +90,11 @@ public class EncodedSeekPerformanceTest {
List<Cell> seeks) throws IOException {
// read all of the key values
StoreFile storeFile = new StoreFile(testingUtility.getTestFileSystem(),
- path, configuration, cacheConf, BloomType.NONE);
-
+ path, configuration, cacheConf, BloomType.NONE, true);
+ storeFile.initReader();
long totalSize = 0;
- StoreFileReader reader = storeFile.createReader();
+ StoreFileReader reader = storeFile.getReader();
StoreFileScanner scanner = reader.getStoreFileScanner(true, false, false, 0, 0, false);
long startReadingTime = System.nanoTime();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
index 1169434..d52c6c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
@@ -48,7 +48,7 @@ public class MockStoreFile extends StoreFile {
MockStoreFile(HBaseTestingUtility testUtil, Path testPath,
long length, long ageInDisk, boolean isRef, long sequenceid) throws IOException {
super(testUtil.getTestFileSystem(), testPath, testUtil.getConfiguration(),
- new CacheConfig(testUtil.getConfiguration()), BloomType.NONE);
+ new CacheConfig(testUtil.getConfiguration()), BloomType.NONE, true);
this.length = length;
this.isRef = isRef;
this.ageInDisk = ageInDisk;
@@ -126,6 +126,11 @@ public class MockStoreFile extends StoreFile {
}
@Override
+ public boolean isCompactedAway() {
+ return compactedAway;
+ }
+
+ @Override
public long getModificationTimeStamp() {
return modificationTime;
}
@@ -136,11 +141,22 @@ public class MockStoreFile extends StoreFile {
}
@Override
+ public void initReader() throws IOException {
+ }
+
+ @Override
+ public StoreFileScanner getStreamScanner(boolean canUseDropBehind, boolean cacheBlocks,
+ boolean pread, boolean isCompaction, long readPt, long scannerOrder,
+ boolean canOptimizeForNonNullColumn) throws IOException {
+ return getReader().getStoreFileScanner(cacheBlocks, pread, isCompaction, readPt, scannerOrder,
+ canOptimizeForNonNullColumn);
+ }
+
+ @Override
public StoreFileReader getReader() {
final long len = this.length;
final TimeRangeTracker timeRangeTracker = this.timeRangeTracker;
final long entries = this.entryCount;
- final boolean compactedAway = this.compactedAway;
return new StoreFileReader() {
@Override
public long length() {
@@ -158,11 +174,6 @@ public class MockStoreFile extends StoreFile {
}
@Override
- public boolean isCompactedAway() {
- return compactedAway;
- }
-
- @Override
public void close(boolean evictOnClose) throws IOException {
// no-op
}
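
Besides moving isCompactedAway() from the anonymous reader up onto the StoreFile, MockStoreFile overrides the new getStreamScanner() hook, whose signature appears verbatim in the hunk above; this is the mechanism behind the commit title, with compaction obtaining a fresh stream scanner from the StoreFile rather than a cloned copy of it. A sketch of a call (argument values are illustrative only; the inline names mirror the override's parameters):

    StoreFileScanner s = storeFile.getStreamScanner(
        /* canUseDropBehind */ dropCache,
        /* cacheBlocks */ false,
        /* pread */ false,
        /* isCompaction */ true,
        /* readPt */ smallestReadPoint,
        /* scannerOrder */ 0,
        /* canOptimizeForNonNullColumn */ false);
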
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
index 9fed202..efe0605 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
@@ -218,9 +218,9 @@ public class TestCacheOnWriteInSchema {
private void readStoreFile(Path path) throws IOException {
CacheConfig cacheConf = store.getCacheConfig();
BlockCache cache = cacheConf.getBlockCache();
- StoreFile sf = new StoreFile(fs, path, conf, cacheConf,
- BloomType.ROWCOL);
- HFile.Reader reader = sf.createReader().getHFileReader();
+ StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, true);
+ sf.initReader();
+ HFile.Reader reader = sf.getReader().getHFileReader();
try {
// Open a scanner with (on read) caching disabled
HFileScanner scanner = reader.getScanner(false, false);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
index 7154511..58dbe8d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
@@ -40,15 +40,12 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
-import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
public class TestCompactionPolicy {
private final static Log LOG = LogFactory.getLog(TestCompactionPolicy.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index dfea761..57a5f59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -200,8 +200,9 @@ public class TestCompoundBloomFilter {
private void readStoreFile(int t, BloomType bt, List<KeyValue> kvs,
Path sfPath) throws IOException {
- StoreFile sf = new StoreFile(fs, sfPath, conf, cacheConf, bt);
- StoreFileReader r = sf.createReader();
+ StoreFile sf = new StoreFile(fs, sfPath, conf, cacheConf, bt, true);
+ sf.initReader();
+ StoreFileReader r = sf.getReader();
final boolean pread = true; // does not really matter
StoreFileScanner scanner = r.getStoreFileScanner(true, pread, false, 0, 0, false);
@@ -285,7 +286,7 @@ public class TestCompoundBloomFilter {
private boolean isInBloom(StoreFileScanner scanner, byte[] row,
byte[] qualifier) {
- Scan scan = new Scan(row, row);
+ Scan scan = new Scan().withStartRow(row).withStopRow(row, true);
scan.addColumn(Bytes.toBytes(RandomKeyValueUtil.COLUMN_FAMILY_NAME), qualifier);
Store store = mock(Store.class);
HColumnDescriptor hcd = mock(HColumnDescriptor.class);
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index b34c307..3e7477d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -280,7 +280,7 @@ public class TestEncryptionKeyRotation {
private static byte[] extractHFileKey(Path path) throws Exception {
HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
- new CacheConfig(conf), conf);
+ new CacheConfig(conf), true, conf);
try {
reader.loadFileInfo();
Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index 2b0ab7b..3d8eeed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -66,7 +66,7 @@ public class TestEncryptionRandomKeying {
private static byte[] extractHFileKey(Path path) throws Exception {
HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
- new CacheConfig(conf), conf);
+ new CacheConfig(conf), true, conf);
try {
reader.loadFileInfo();
Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
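Both encryption tests pick up the same extension of HFile.createReader: a boolean inserted between the CacheConfig and the Configuration. The diff does not name the parameter; the sketch below assumes it is the primary-replica-reader flag this change threads through the read path (the tests pass true), and the helper method is illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class HFileReaderSketch {
  static void printFileContext(FileSystem fs, Path path, Configuration conf) throws Exception {
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf),
        /* assumed name: primaryReplicaReader */ true, conf);
    try {
      reader.loadFileInfo();
      System.out.println(reader.getFileContext());
    } finally {
      reader.close();
    }
  }
}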
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
index 9f0975d..e231b60 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
@@ -94,10 +94,10 @@ public class TestFSErrorsExposed {
TestStoreFile.writeStoreFile(
writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
- StoreFile sf = new StoreFile(fs, writer.getPath(),
- util.getConfiguration(), cacheConf, BloomType.NONE);
-
- StoreFileReader reader = sf.createReader();
+ StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
+ sf.initReader();
+ StoreFileReader reader = sf.getReader();
HFileScanner scanner = reader.getScanner(false, true);
FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
@@ -144,8 +144,8 @@ public class TestFSErrorsExposed {
TestStoreFile.writeStoreFile(
writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
- StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(),
- cacheConf, BloomType.NONE);
+ StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
Collections.singletonList(sf), false, true, false, false,
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
index a50dc42..1997b31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
@@ -293,8 +293,9 @@ public class TestMobStoreCompaction {
if (fs.exists(mobDirPath)) {
FileStatus[] files = UTIL.getTestFileSystem().listStatus(mobDirPath);
for (FileStatus file : files) {
- StoreFile sf = new StoreFile(fs, file.getPath(), conf, cacheConfig, BloomType.NONE);
- Map<byte[], byte[]> fileInfo = sf.createReader().loadFileInfo();
+ StoreFile sf = new StoreFile(fs, file.getPath(), conf, cacheConfig, BloomType.NONE, true);
+ sf.initReader();
+ Map<byte[], byte[]> fileInfo = sf.getReader().loadFileInfo();
byte[] count = fileInfo.get(StoreFile.MOB_CELLS_COUNT);
assertTrue(count != null);
mobCellsCount += Bytes.toLong(count);
@@ -407,7 +408,7 @@ public class TestMobStoreCompaction {
int size = 0;
if (fs.exists(mobDirPath)) {
for (FileStatus f : fs.listStatus(mobDirPath)) {
- StoreFile sf = new StoreFile(fs, f.getPath(), conf, cacheConfig, BloomType.NONE);
+ StoreFile sf = new StoreFile(fs, f.getPath(), conf, cacheConfig, BloomType.NONE, true);
sfs.add(sf);
if (StoreFileInfo.isDelFile(sf.getPath())) {
numDelfiles++;
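The two-step open pattern above recurs throughout this commit: the StoreFile constructor grows a trailing boolean (assumed here to mark a primary-replica store file), initReader() opens the single shared reader that createReader() used to hand back, and getReader() retrieves it. A sketch of the pattern, mirroring the MOB cell count read in the hunk above; readMobCellsCount is an illustrative name:

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;
import org.apache.hadoop.hbase.util.Bytes;

public class StoreFileOpenSketch {
  static long readMobCellsCount(FileSystem fs, Path path, Configuration conf,
      CacheConfig cacheConf) throws Exception {
    StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
    sf.initReader();                    // replaces the old createReader()
    StoreFileReader r = sf.getReader(); // the one shared reader
    Map<byte[], byte[]> fileInfo = r.loadFileInfo();
    byte[] count = fileInfo.get(StoreFile.MOB_CELLS_COUNT);
    return count == null ? 0L : Bytes.toLong(count);
  }
}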
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
index ecb808e..bf0fb05 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
@@ -108,8 +108,8 @@ public class TestReversibleScanners {
.withFileContext(hFileContext).build();
writeStoreFile(writer);
- StoreFile sf = new StoreFile(fs, writer.getPath(),
- TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
+ StoreFile sf = new StoreFile(fs, writer.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
List<StoreFileScanner> scanners = StoreFileScanner
.getScannersForStoreFiles(Collections.singletonList(sf),
@@ -162,11 +162,11 @@ public class TestReversibleScanners {
writeMemstoreAndStoreFiles(memstore, new StoreFileWriter[] { writer1,
writer2 });
- StoreFile sf1 = new StoreFile(fs, writer1.getPath(),
- TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
+ StoreFile sf1 = new StoreFile(fs, writer1.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
- StoreFile sf2 = new StoreFile(fs, writer2.getPath(),
- TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
+ StoreFile sf2 = new StoreFile(fs, writer2.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
/**
* Test without MVCC
*/
@@ -252,11 +252,11 @@ public class TestReversibleScanners {
writeMemstoreAndStoreFiles(memstore, new StoreFileWriter[] { writer1,
writer2 });
- StoreFile sf1 = new StoreFile(fs, writer1.getPath(),
- TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
+ StoreFile sf1 = new StoreFile(fs, writer1.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
- StoreFile sf2 = new StoreFile(fs, writer2.getPath(),
- TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
+ StoreFile sf2 = new StoreFile(fs, writer2.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
+ BloomType.NONE, true);
ScanType scanType = ScanType.USER_SCAN;
ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), FAMILYNAME, 0, Integer.MAX_VALUE,
@@ -272,7 +272,7 @@ public class TestReversibleScanners {
// Case 2.Test reversed scan with a specified start row
int startRowNum = ROWSIZE / 2;
byte[] startRow = ROWS[startRowNum];
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan,
scanType, scanInfo, MAXMVCC);
verifyCountAndOrder(storeScanner, QUALSIZE * (startRowNum + 1),
@@ -354,21 +354,21 @@ public class TestReversibleScanners {
// Case5: Case4 + specify start row
int startRowNum = ROWSIZE * 3 / 4;
- scan.setStartRow(ROWS[startRowNum]);
+ scan.withStartRow(ROWS[startRowNum]);
scanner = region.getScanner(scan);
verifyCountAndOrder(scanner, (startRowNum + 1) * 2 * 2, (startRowNum + 1),
false);
// Case6: Case4 + specify stop row
int stopRowNum = ROWSIZE / 4;
- scan.setStartRow(HConstants.EMPTY_BYTE_ARRAY);
- scan.setStopRow(ROWS[stopRowNum]);
+ scan.withStartRow(HConstants.EMPTY_BYTE_ARRAY);
+ scan.withStopRow(ROWS[stopRowNum]);
scanner = region.getScanner(scan);
verifyCountAndOrder(scanner, (ROWSIZE - stopRowNum - 1) * 2 * 2, (ROWSIZE
- stopRowNum - 1), false);
// Case7: Case4 + specify start row + specify stop row
- scan.setStartRow(ROWS[startRowNum]);
+ scan.withStartRow(ROWS[startRowNum]);
scanner = region.getScanner(scan);
verifyCountAndOrder(scanner, (startRowNum - stopRowNum) * 2 * 2,
(startRowNum - stopRowNum), false);
@@ -595,9 +595,6 @@ public class TestReversibleScanners {
// Case2: seek to the previous row in backwardSeek
int seekRowNum = ROWSIZE - 3;
- KeyValue seekKey = KeyValueUtil.createLastOnRow(ROWS[seekRowNum]);
- expectedKey = getNextReadableKeyValueWithBackwardScan(seekRowNum - 1, 0,
- readPoint);
res = false;
for (KeyValueScanner scanner : scanners) {
res |= scanner.backwardSeek(expectedKey);
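The scan-bound changes in this test are one-for-one replacements: withStartRow/withStopRow mutate the Scan and return it, so the per-case reassignments above keep the old setStartRow/setStopRow semantics (stop row exclusive by default). A compressed sketch of the reversed-scan cases, with placeholder rows standing in for the test's ROWS fixture:

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ReversedScanBoundsSketch {
  public static void main(String[] args) {
    Scan scan = new Scan();
    scan.setReversed(true);
    // Specify a start row only; the stop bound stays open.
    scan.withStartRow(Bytes.toBytes("row-075"));
    // Reset the start bound and set a stop row instead; as with the old
    // setStopRow, the stop row is exclusive unless stated otherwise.
    scan.withStartRow(HConstants.EMPTY_BYTE_ARRAY);
    scan.withStopRow(Bytes.toBytes("row-025"));
  }
}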
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 0d339b1..76bf1cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -276,7 +276,7 @@ public class TestStore {
writer.close();
// Verify that compression and encoding settings are respected
- HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
+ HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
Assert.assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
Assert.assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding());
reader.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index 7e4ebd8..d1444c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -27,6 +27,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -116,8 +117,7 @@ public class TestStoreFile extends HBaseTestCase {
writeStoreFile(writer);
Path sfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
- StoreFile sf = new StoreFile(this.fs, sfPath, conf, cacheConf,
- BloomType.NONE);
+ StoreFile sf = new StoreFile(this.fs, sfPath, conf, cacheConf, BloomType.NONE, true);
checkHalfHFile(regionFs, sf);
}
@@ -169,9 +169,9 @@ public class TestStoreFile extends HBaseTestCase {
writeStoreFile(writer);
Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
- StoreFile hsf = new StoreFile(this.fs, hsfPath, conf, cacheConf,
- BloomType.NONE);
- StoreFileReader reader = hsf.createReader();
+ StoreFile hsf = new StoreFile(this.fs, hsfPath, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ StoreFileReader reader = hsf.getReader();
// Split on a row, not in middle of row. Midkey returned by reader
// may be in middle of row. Create new one with empty column and
// timestamp.
@@ -184,11 +184,11 @@ public class TestStoreFile extends HBaseTestCase {
// Make a reference
HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow);
Path refPath = splitStoreFile(regionFs, splitHri, TEST_FAMILY, hsf, midRow, true);
- StoreFile refHsf = new StoreFile(this.fs, refPath, conf, cacheConf,
- BloomType.NONE);
+ StoreFile refHsf = new StoreFile(this.fs, refPath, conf, cacheConf, BloomType.NONE, true);
+ refHsf.initReader();
// Now confirm that I can read from the reference and that it only gets
// keys from top half of the file.
- HFileScanner s = refHsf.createReader().getScanner(false, false);
+ HFileScanner s = refHsf.getReader().getScanner(false, false);
for(boolean first = true; (!s.isSeeked() && s.seekTo()) || s.next();) {
ByteBuffer bb = ByteBuffer.wrap(((KeyValue) s.getKey()).getKey());
kv = KeyValueUtil.createKeyValueFromKey(bb);
@@ -242,13 +242,14 @@ public class TestStoreFile extends HBaseTestCase {
// Try to open store file from link
StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath);
- StoreFile hsf = new StoreFile(this.fs, storeFileInfo, testConf, cacheConf,
- BloomType.NONE);
+ StoreFile hsf =
+ new StoreFile(this.fs, storeFileInfo, testConf, cacheConf, BloomType.NONE, true);
assertTrue(storeFileInfo.isLink());
+ hsf.initReader();
// Now confirm that I can read from the link
int count = 1;
- HFileScanner s = hsf.createReader().getScanner(false, false);
+ HFileScanner s = hsf.getReader().getScanner(false, false);
s.seekTo();
while (s.next()) {
count++;
@@ -295,8 +296,8 @@ public class TestStoreFile extends HBaseTestCase {
// <root>/clone/splitB/<cf>/<reftohfilelink>
HRegionInfo splitHriA = new HRegionInfo(hri.getTable(), null, SPLITKEY);
HRegionInfo splitHriB = new HRegionInfo(hri.getTable(), SPLITKEY, null);
- StoreFile f = new StoreFile(fs, linkFilePath, testConf, cacheConf, BloomType.NONE);
- f.createReader();
+ StoreFile f = new StoreFile(fs, linkFilePath, testConf, cacheConf, BloomType.NONE, true);
+ f.initReader();
Path pathA = splitStoreFile(cloneRegionFs, splitHriA, TEST_FAMILY, f, SPLITKEY, true); // top
Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom
f.closeReader(true);
@@ -307,12 +308,12 @@ public class TestStoreFile extends HBaseTestCase {
// reference to an hfile link. There is code in StoreFile that handles this case.
// Try to open store file from link
- StoreFile hsfA = new StoreFile(this.fs, pathA, testConf, cacheConf,
- BloomType.NONE);
+ StoreFile hsfA = new StoreFile(this.fs, pathA, testConf, cacheConf, BloomType.NONE, true);
+ hsfA.initReader();
// Now confirm that I can read from the ref to link
int count = 1;
- HFileScanner s = hsfA.createReader().getScanner(false, false);
+ HFileScanner s = hsfA.getReader().getScanner(false, false);
s.seekTo();
while (s.next()) {
count++;
@@ -320,11 +321,11 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(count > 0); // read some rows here
// Try to open store file from link
- StoreFile hsfB = new StoreFile(this.fs, pathB, testConf, cacheConf,
- BloomType.NONE);
+ StoreFile hsfB = new StoreFile(this.fs, pathB, testConf, cacheConf, BloomType.NONE, true);
+ hsfB.initReader();
// Now confirm that I can read from the ref to link
- HFileScanner sB = hsfB.createReader().getScanner(false, false);
+ HFileScanner sB = hsfB.getReader().getScanner(false, false);
sB.seekTo();
//count++ as seekTo() will advance the scanner
@@ -339,7 +340,8 @@ public class TestStoreFile extends HBaseTestCase {
private void checkHalfHFile(final HRegionFileSystem regionFs, final StoreFile f)
throws IOException {
- Cell midkey = f.createReader().midkey();
+ f.initReader();
+ Cell midkey = f.getReader().midkey();
KeyValue midKV = (KeyValue)midkey;
byte [] midRow = CellUtil.cloneRow(midKV);
// Create top split.
@@ -351,10 +353,12 @@ public class TestStoreFile extends HBaseTestCase {
midRow, null);
Path bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, midRow, false);
// Make readers on top and bottom.
- StoreFileReader top = new StoreFile(
- this.fs, topPath, conf, cacheConf, BloomType.NONE).createReader();
- StoreFileReader bottom = new StoreFile(
- this.fs, bottomPath, conf, cacheConf, BloomType.NONE).createReader();
+ StoreFile topF = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE, true);
+ topF.initReader();
+ StoreFileReader top = topF.getReader();
+ StoreFile bottomF = new StoreFile(this.fs, bottomPath, conf, cacheConf, BloomType.NONE, true);
+ bottomF.initReader();
+ StoreFileReader bottom = bottomF.getReader();
ByteBuffer previous = null;
LOG.info("Midkey: " + midKV.toString());
ByteBuffer bbMidkeyBytes = ByteBuffer.wrap(midKV.getKey());
@@ -412,7 +416,9 @@ public class TestStoreFile extends HBaseTestCase {
assertNull(bottomPath);
- top = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE).createReader();
+ topF = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE, true);
+ topF.initReader();
+ top = topF.getReader();
// Now read from the top.
first = true;
topScanner = top.getScanner(false, false);
@@ -449,8 +455,10 @@ public class TestStoreFile extends HBaseTestCase {
topPath = splitStoreFile(regionFs,topHri, TEST_FAMILY, f, badmidkey, true);
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
assertNull(topPath);
- bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
- BloomType.NONE).createReader();
+
+ bottomF = new StoreFile(this.fs, bottomPath, conf, cacheConf, BloomType.NONE, true);
+ bottomF.initReader();
+ bottom = bottomF.getReader();
first = true;
bottomScanner = bottom.getScanner(false, false);
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
@@ -502,7 +510,8 @@ public class TestStoreFile extends HBaseTestCase {
}
writer.close();
- StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
+ StoreFileReader reader =
+ new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
reader.loadFileInfo();
reader.loadBloomfilter();
StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
@@ -590,7 +599,8 @@ public class TestStoreFile extends HBaseTestCase {
}
writer.close();
- StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
+ StoreFileReader reader =
+ new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
reader.loadFileInfo();
reader.loadBloomfilter();
@@ -635,7 +645,8 @@ public class TestStoreFile extends HBaseTestCase {
writeStoreFile(writer);
writer.close();
- StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
+ StoreFileReader reader =
+ new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
// Now do reseek with empty KV to position to the beginning of the file
@@ -695,7 +706,8 @@ public class TestStoreFile extends HBaseTestCase {
}
writer.close();
- StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
+ StoreFileReader reader =
+ new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
reader.loadFileInfo();
reader.loadBloomfilter();
StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
@@ -844,12 +856,13 @@ public class TestStoreFile extends HBaseTestCase {
writer.close();
StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
- BloomType.NONE);
+ BloomType.NONE, true);
Store store = mock(Store.class);
HColumnDescriptor hcd = mock(HColumnDescriptor.class);
when(hcd.getName()).thenReturn(family);
when(store.getFamily()).thenReturn(hcd);
- StoreFileReader reader = hsf.createReader();
+ hsf.initReader();
+ StoreFileReader reader = hsf.getReader();
StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
columns.add(qualifier);
@@ -901,11 +914,12 @@ public class TestStoreFile extends HBaseTestCase {
Path pathCowOff = new Path(baseDir, "123456789");
StoreFileWriter writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
- BloomType.NONE);
+ BloomType.NONE, true);
LOG.debug(hsf.getPath().toString());
// Read this file, we should see 3 misses
- StoreFileReader reader = hsf.createReader();
+ hsf.initReader();
+ StoreFileReader reader = hsf.getReader();
reader.loadFileInfo();
StoreFileScanner scanner = getStoreFileScanner(reader, true, true);
scanner.seek(KeyValue.LOWESTKEY);
@@ -923,10 +937,11 @@ public class TestStoreFile extends HBaseTestCase {
Path pathCowOn = new Path(baseDir, "123456788");
writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
- BloomType.NONE);
+ BloomType.NONE, true);
// Read this file, we should see 3 hits
- reader = hsf.createReader();
+ hsf.initReader();
+ reader = hsf.getReader();
scanner = getStoreFileScanner(reader, true, true);
scanner.seek(KeyValue.LOWESTKEY);
while (scanner.next() != null);
@@ -938,15 +953,15 @@ public class TestStoreFile extends HBaseTestCase {
reader.close(cacheConf.shouldEvictOnClose());
// Let's read back the two files to ensure the blocks exactly match
- hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
- BloomType.NONE);
- StoreFileReader readerOne = hsf.createReader();
+ hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ StoreFileReader readerOne = hsf.getReader();
readerOne.loadFileInfo();
StoreFileScanner scannerOne = getStoreFileScanner(readerOne, true, true);
scannerOne.seek(KeyValue.LOWESTKEY);
- hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
- BloomType.NONE);
- StoreFileReader readerTwo = hsf.createReader();
+ hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ StoreFileReader readerTwo = hsf.getReader();
readerTwo.loadFileInfo();
StoreFileScanner scannerTwo = getStoreFileScanner(readerTwo, true, true);
scannerTwo.seek(KeyValue.LOWESTKEY);
@@ -977,9 +992,9 @@ public class TestStoreFile extends HBaseTestCase {
// Let's close the first file with evict on close turned on
conf.setBoolean("hbase.rs.evictblocksonclose", true);
cacheConf = new CacheConfig(conf);
- hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
- BloomType.NONE);
- reader = hsf.createReader();
+ hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ reader = hsf.getReader();
reader.close(cacheConf.shouldEvictOnClose());
// We should have 3 new evictions but the evict count stat should not change. Eviction because
@@ -991,9 +1006,9 @@ public class TestStoreFile extends HBaseTestCase {
// Let's close the second file with evict on close turned off
conf.setBoolean("hbase.rs.evictblocksonclose", false);
cacheConf = new CacheConfig(conf);
- hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
- BloomType.NONE);
- reader = hsf.createReader();
+ hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf, BloomType.NONE, true);
+ hsf.initReader();
+ reader = hsf.getReader();
reader.close(cacheConf.shouldEvictOnClose());
// We expect no changes
@@ -1078,9 +1093,10 @@ public class TestStoreFile extends HBaseTestCase {
.build();
writer.close();
- StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
- cacheConf, BloomType.NONE);
- StoreFileReader reader = storeFile.createReader();
+ StoreFile storeFile =
+ new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
+ storeFile.initReader();
+ StoreFileReader reader = storeFile.getReader();
Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
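Where tests construct a StoreFileReader directly, two arguments are threaded in alongside the existing boolean. The diff does not show the parameter names; the sketch below assumes, in order, a primary-replica flag, a reference counter shared with the owning StoreFile, and a shared-reader flag, which matches the intent of HBASE-17914 (compactions get a fresh reader rather than a cloned StoreFile):

import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;

public class StoreFileReaderSketch {
  static StoreFileReader openForTest(FileSystem fs, Path f, CacheConfig cacheConf,
      Configuration conf) throws Exception {
    StoreFileReader reader =
        new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
    reader.loadFileInfo();
    reader.loadBloomfilter();
    return reader;
  }
}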
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index d628dc8..3d3c79c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -23,23 +23,24 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -74,7 +75,8 @@ public class TestStoreFileScannerWithTagCompression {
writeStoreFile(writer);
writer.close();
- StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
+ StoreFileReader reader =
+ new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
try {
// Now do reseek with empty KV to position to the beginning of the file
http://git-wip-us.apache.org/repos/asf/hbase/blob/66b616d7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index dff6919..170fba2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -67,9 +67,6 @@ public class TestCompactor {
when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
anyBoolean())).thenReturn(mock(StoreFileScanner.class));
when(sf.getReader()).thenReturn(r);
- when(sf.createReader()).thenReturn(r);
- when(sf.createReader(anyBoolean())).thenReturn(r);
- when(sf.cloneForReader()).thenReturn(sf);
when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);
return sf;
}
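With createReader(), createReader(boolean) and cloneForReader() gone from StoreFile, the mock in createDummyStoreFile() shrinks to stubbing the single shared reader. A sketch of the resulting setup; the reader mock r is created inline here for self-containment, whereas the real test builds it a few lines earlier, outside this hunk:

import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;

public class DummyStoreFileSketch {
  static StoreFile createDummyStoreFile(long maxSequenceId) {
    StoreFileReader r = mock(StoreFileReader.class);
    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
      anyBoolean())).thenReturn(mock(StoreFileScanner.class));
    StoreFile sf = mock(StoreFile.class);
    when(sf.getReader()).thenReturn(r);
    when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);
    return sf;
  }
}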