Posted to commits@hbase.apache.org by te...@apache.org on 2011/08/03 21:59:54 UTC
svn commit: r1153634 [4/4] - in /hbase/trunk: ./
src/main/java/org/apache/hadoop/hbase/
src/main/java/org/apache/hadoop/hbase/io/hfile/
src/main/java/org/apache/hadoop/hbase/mapreduce/
src/main/java/org/apache/hadoop/hbase/regionserver/ src/main/java/o...
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java Wed Aug 3 19:59:48 2011
@@ -118,8 +118,8 @@ public class TestHFileSeek extends TestC
long totalBytes = 0;
FSDataOutputStream fout = createFSOutput(path, fs);
try {
- Writer writer =
- new Writer(fout, options.minBlockSize, options.compress, null);
+ Writer writer = HFile.getWriterFactory(conf).createWriter(fout,
+ options.minBlockSize, options.compress, null);
try {
BytesWritable key = new BytesWritable();
BytesWritable val = new BytesWritable();
@@ -163,8 +163,8 @@ public class TestHFileSeek extends TestC
int miss = 0;
long totalBytes = 0;
FSDataInputStream fsdis = fs.open(path);
- Reader reader = new Reader(path, fsdis, fs.getFileStatus(path).getLen(),
- null, false, false);
+ Reader reader = HFile.createReader(path, fsdis,
+ fs.getFileStatus(path).getLen(), null, false, false);
reader.loadFileInfo();
KeySampler kSampler =
new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
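[Editorial sketch] For reference, the pattern this commit migrates the tests to: HFile writers now come from a per-Configuration factory and readers from a static creator, replacing the direct constructors. A minimal sketch using the signatures shown in the hunks above (fs, conf, and path assumed set up as in the test):

    // Old: new Writer(fout, options.minBlockSize, options.compress, null)
    FSDataOutputStream fout = fs.create(path);
    HFile.Writer writer = HFile.getWriterFactory(conf).createWriter(
        fout, 4096 /* minBlockSize */, "none" /* compression */, null);
    // ... append sorted key/values, then writer.close() ...

    // Old: new Reader(path, fsdis, length, null, false, false)
    FSDataInputStream fsdis = fs.open(path);
    HFile.Reader reader = HFile.createReader(path, fsdis,
        fs.getFileStatus(path).getLen(), null, false, false);
    reader.loadFileInfo();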
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java Wed Aug 3 19:59:48 2011
@@ -19,7 +19,6 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import java.nio.ByteBuffer;
import java.util.Random;
import org.apache.hadoop.hbase.io.HeapSize;
@@ -43,11 +42,11 @@ public class TestLruBlockCache extends T
LruBlockCache cache = new LruBlockCache(maxSize,blockSize);
- Block [] blocks = generateFixedBlocks(10, blockSize, "block");
+ CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");
// Add all the blocks
- for(Block block : blocks) {
- cache.cacheBlock(block.blockName, block.buf);
+ for (CachedItem block : blocks) {
+ cache.cacheBlock(block.blockName, block);
}
// Let the eviction run
@@ -70,35 +69,35 @@ public class TestLruBlockCache extends T
LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
- Block [] blocks = generateRandomBlocks(100, blockSize);
+ CachedItem [] blocks = generateRandomBlocks(100, blockSize);
long expectedCacheSize = cache.heapSize();
// Confirm empty
- for(Block block : blocks) {
+ for (CachedItem block : blocks) {
assertTrue(cache.getBlock(block.blockName, true) == null);
}
// Add blocks
- for(Block block : blocks) {
- cache.cacheBlock(block.blockName, block.buf);
- expectedCacheSize += block.heapSize();
+ for (CachedItem block : blocks) {
+ cache.cacheBlock(block.blockName, block);
+ expectedCacheSize += block.cacheBlockHeapSize();
}
// Verify correctly calculated cache heap size
assertEquals(expectedCacheSize, cache.heapSize());
// Check if all blocks are properly cached and retrieved
- for(Block block : blocks) {
- ByteBuffer buf = cache.getBlock(block.blockName, true);
+ for (CachedItem block : blocks) {
+ HeapSize buf = cache.getBlock(block.blockName, true);
assertTrue(buf != null);
- assertEquals(buf.capacity(), block.buf.capacity());
+ assertEquals(buf.heapSize(), block.heapSize());
}
// Re-add same blocks and ensure nothing has changed
- for(Block block : blocks) {
+ for (CachedItem block : blocks) {
try {
- cache.cacheBlock(block.blockName, block.buf);
+ cache.cacheBlock(block.blockName, block);
assertTrue("Cache should not allow re-caching a block", false);
} catch(RuntimeException re) {
// expected
@@ -109,10 +108,10 @@ public class TestLruBlockCache extends T
assertEquals(expectedCacheSize, cache.heapSize());
// Check if all blocks are properly cached and retrieved
- for(Block block : blocks) {
- ByteBuffer buf = cache.getBlock(block.blockName, true);
+ for (CachedItem block : blocks) {
+ HeapSize buf = cache.getBlock(block.blockName, true);
assertTrue(buf != null);
- assertEquals(buf.capacity(), block.buf.capacity());
+ assertEquals(buf.heapSize(), block.heapSize());
}
// Expect no evictions
@@ -129,14 +128,14 @@ public class TestLruBlockCache extends T
LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
- Block [] blocks = generateFixedBlocks(10, blockSize, "block");
+ CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");
long expectedCacheSize = cache.heapSize();
// Add all the blocks
- for(Block block : blocks) {
- cache.cacheBlock(block.blockName, block.buf);
- expectedCacheSize += block.heapSize();
+ for (CachedItem block : blocks) {
+ cache.cacheBlock(block.blockName, block);
+ expectedCacheSize += block.cacheBlockHeapSize();
}
// A single eviction run should have occurred
@@ -158,7 +157,7 @@ public class TestLruBlockCache extends T
assertTrue(cache.getBlock(blocks[1].blockName, true) == null);
for(int i=2;i<blocks.length;i++) {
assertEquals(cache.getBlock(blocks[i].blockName, true),
- blocks[i].buf);
+ blocks[i]);
}
}
@@ -169,21 +168,21 @@ public class TestLruBlockCache extends T
LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
- Block [] singleBlocks = generateFixedBlocks(5, 10000, "single");
- Block [] multiBlocks = generateFixedBlocks(5, 10000, "multi");
+ CachedItem [] singleBlocks = generateFixedBlocks(5, 10000, "single");
+ CachedItem [] multiBlocks = generateFixedBlocks(5, 10000, "multi");
long expectedCacheSize = cache.heapSize();
// Add and get the multi blocks
- for(Block block : multiBlocks) {
- cache.cacheBlock(block.blockName, block.buf);
- expectedCacheSize += block.heapSize();
- assertEquals(cache.getBlock(block.blockName, true), block.buf);
+ for (CachedItem block : multiBlocks) {
+ cache.cacheBlock(block.blockName, block);
+ expectedCacheSize += block.cacheBlockHeapSize();
+ assertEquals(cache.getBlock(block.blockName, true), block);
}
// Add the single blocks (no get)
- for(Block block : singleBlocks) {
- cache.cacheBlock(block.blockName, block.buf);
+ for (CachedItem block : singleBlocks) {
+ cache.cacheBlock(block.blockName, block);
expectedCacheSize += block.heapSize();
}
@@ -214,9 +213,9 @@ public class TestLruBlockCache extends T
// And all others to be cached
for(int i=1;i<4;i++) {
assertEquals(cache.getBlock(singleBlocks[i].blockName, true),
- singleBlocks[i].buf);
+ singleBlocks[i]);
assertEquals(cache.getBlock(multiBlocks[i].blockName, true),
- multiBlocks[i].buf);
+ multiBlocks[i]);
}
}
@@ -236,9 +235,9 @@ public class TestLruBlockCache extends T
0.34f);// memory
- Block [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
- Block [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
- Block [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
+ CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
+ CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
+ CachedItem [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
long expectedCacheSize = cache.heapSize();
@@ -246,17 +245,17 @@ public class TestLruBlockCache extends T
for(int i=0;i<3;i++) {
// Just add single blocks
- cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i].buf);
- expectedCacheSize += singleBlocks[i].heapSize();
+ cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i]);
+ expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();
// Add and get multi blocks
- cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i].buf);
- expectedCacheSize += multiBlocks[i].heapSize();
+ cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i]);
+ expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
cache.getBlock(multiBlocks[i].blockName, true);
// Add memory blocks as such
- cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i].buf, true);
- expectedCacheSize += memoryBlocks[i].heapSize();
+ cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i], true);
+ expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize();
}
@@ -267,7 +266,7 @@ public class TestLruBlockCache extends T
assertEquals(expectedCacheSize, cache.heapSize());
// Insert a single block, oldest single should be evicted
- cache.cacheBlock(singleBlocks[3].blockName, singleBlocks[3].buf);
+ cache.cacheBlock(singleBlocks[3].blockName, singleBlocks[3]);
// Single eviction, one thing evicted
assertEquals(1, cache.getEvictionCount());
@@ -280,7 +279,7 @@ public class TestLruBlockCache extends T
cache.getBlock(singleBlocks[1].blockName, true);
// Insert another single block
- cache.cacheBlock(singleBlocks[4].blockName, singleBlocks[4].buf);
+ cache.cacheBlock(singleBlocks[4].blockName, singleBlocks[4]);
// Two evictions, two evicted.
assertEquals(2, cache.getEvictionCount());
@@ -290,7 +289,7 @@ public class TestLruBlockCache extends T
assertEquals(null, cache.getBlock(multiBlocks[0].blockName, true));
// Insert another memory block
- cache.cacheBlock(memoryBlocks[3].blockName, memoryBlocks[3].buf, true);
+ cache.cacheBlock(memoryBlocks[3].blockName, memoryBlocks[3], true);
// Three evictions, three evicted.
assertEquals(3, cache.getEvictionCount());
@@ -300,8 +299,8 @@ public class TestLruBlockCache extends T
assertEquals(null, cache.getBlock(memoryBlocks[0].blockName, true));
// Add a block that is twice as big (should force two evictions)
- Block [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
- cache.cacheBlock(bigBlocks[0].blockName, bigBlocks[0].buf);
+ CachedItem [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
+ cache.cacheBlock(bigBlocks[0].blockName, bigBlocks[0]);
// Four evictions, six evicted (inserted block 3X size, expect +3 evicted)
assertEquals(4, cache.getEvictionCount());
@@ -316,7 +315,7 @@ public class TestLruBlockCache extends T
cache.getBlock(bigBlocks[0].blockName, true);
// Cache another single big block
- cache.cacheBlock(bigBlocks[1].blockName, bigBlocks[1].buf);
+ cache.cacheBlock(bigBlocks[1].blockName, bigBlocks[1]);
// Five evictions, nine evicted (3 new)
assertEquals(5, cache.getEvictionCount());
@@ -328,7 +327,7 @@ public class TestLruBlockCache extends T
assertEquals(null, cache.getBlock(multiBlocks[2].blockName, true));
// Cache a big memory block
- cache.cacheBlock(bigBlocks[2].blockName, bigBlocks[2].buf, true);
+ cache.cacheBlock(bigBlocks[2].blockName, bigBlocks[2], true);
// Six evictions, twelve evicted (3 new)
assertEquals(6, cache.getEvictionCount());
@@ -358,18 +357,18 @@ public class TestLruBlockCache extends T
0.33f, // multi
0.34f);// memory
- Block [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
- Block [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
+ CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
+ CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
// Add 5 multi blocks
- for(Block block : multiBlocks) {
- cache.cacheBlock(block.blockName, block.buf);
+ for (CachedItem block : multiBlocks) {
+ cache.cacheBlock(block.blockName, block);
cache.getBlock(block.blockName, true);
}
// Add 5 single blocks
for(int i=0;i<5;i++) {
- cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i].buf);
+ cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i]);
}
// An eviction ran
@@ -392,7 +391,7 @@ public class TestLruBlockCache extends T
// 12 more evicted.
for(int i=5;i<18;i++) {
- cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i].buf);
+ cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i]);
}
// 4 total evictions, 16 total evicted
@@ -420,22 +419,22 @@ public class TestLruBlockCache extends T
0.33f, // multi
0.34f);// memory
- Block [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
- Block [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
- Block [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
+ CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
+ CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
+ CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
// Add all blocks from all priorities
for(int i=0;i<10;i++) {
// Just add single blocks
- cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i].buf);
+ cache.cacheBlock(singleBlocks[i].blockName, singleBlocks[i]);
// Add and get multi blocks
- cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i].buf);
+ cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i]);
cache.getBlock(multiBlocks[i].blockName, true);
// Add memory blocks as such
- cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i].buf, true);
+ cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i], true);
}
// Do not expect any evictions yet
@@ -459,29 +458,29 @@ public class TestLruBlockCache extends T
// And the newest 5 blocks should still be accessible
for(int i=5;i<10;i++) {
- assertEquals(singleBlocks[i].buf, cache.getBlock(singleBlocks[i].blockName, true));
- assertEquals(multiBlocks[i].buf, cache.getBlock(multiBlocks[i].blockName, true));
- assertEquals(memoryBlocks[i].buf, cache.getBlock(memoryBlocks[i].blockName, true));
+ assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].blockName, true));
+ assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].blockName, true));
+ assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].blockName, true));
}
}
- private Block [] generateFixedBlocks(int numBlocks, int size, String pfx) {
- Block [] blocks = new Block[numBlocks];
+ private CachedItem [] generateFixedBlocks(int numBlocks, int size, String pfx) {
+ CachedItem [] blocks = new CachedItem[numBlocks];
for(int i=0;i<numBlocks;i++) {
- blocks[i] = new Block(pfx + i, size);
+ blocks[i] = new CachedItem(pfx + i, size);
}
return blocks;
}
- private Block [] generateFixedBlocks(int numBlocks, long size, String pfx) {
+ private CachedItem [] generateFixedBlocks(int numBlocks, long size, String pfx) {
return generateFixedBlocks(numBlocks, (int)size, pfx);
}
- private Block [] generateRandomBlocks(int numBlocks, long maxSize) {
- Block [] blocks = new Block[numBlocks];
+ private CachedItem [] generateRandomBlocks(int numBlocks, long maxSize) {
+ CachedItem [] blocks = new CachedItem[numBlocks];
Random r = new Random();
for(int i=0;i<numBlocks;i++) {
- blocks[i] = new Block("block" + i, r.nextInt((int)maxSize)+1);
+ blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
}
return blocks;
}
@@ -511,19 +510,26 @@ public class TestLruBlockCache extends T
LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
}
- private static class Block implements HeapSize {
+ private static class CachedItem implements HeapSize {
String blockName;
- ByteBuffer buf;
+ int size;
- Block(String blockName, int size) {
+ CachedItem(String blockName, int size) {
this.blockName = blockName;
- this.buf = ByteBuffer.allocate(size);
+ this.size = size;
}
+ /** The size of this item reported to the block cache layer */
+ @Override
public long heapSize() {
- return CachedBlock.PER_BLOCK_OVERHEAD +
- ClassSize.align(blockName.length()) +
- ClassSize.align(buf.capacity());
+ return ClassSize.align(size);
+ }
+
+ /** Size of the cache block holding this item. Used for verification. */
+ public long cacheBlockHeapSize() {
+ return CachedBlock.PER_BLOCK_OVERHEAD
+ + ClassSize.align(blockName.length())
+ + ClassSize.align(size);
}
}
}
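[Editorial sketch] The mechanical renames above all follow from one contract change: LruBlockCache now caches arbitrary HeapSize implementations rather than raw ByteBuffers, and wraps each one in a CachedBlock whose overhead is accounted for separately. A minimal sketch using the CachedItem helper defined at the end of this file's diff:

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
    CachedItem item = new CachedItem("block0", 1024);
    long expectedCacheSize = cache.heapSize(); // overhead of the empty cache

    // cacheBlock/getBlock traffic in HeapSize now, not ByteBuffer.
    cache.cacheBlock(item.blockName, item);
    expectedCacheSize += item.cacheBlockHeapSize(); // wrapper + name + item,
                                                    // not bare item.heapSize()
    assertEquals(expectedCacheSize, cache.heapSize());

    HeapSize got = cache.getBlock(item.blockName, true);
    assertEquals(item.heapSize(), got.heapSize());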
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java Wed Aug 3 19:59:48 2011
@@ -42,7 +42,8 @@ public class TestReseekTo {
Path ncTFile = new Path(HBaseTestingUtility.getTestDir(), "basic.hfile");
FSDataOutputStream fout = TEST_UTIL.getTestFileSystem().create(ncTFile);
- HFile.Writer writer = new HFile.Writer(fout, 4000, "none", null);
+ HFile.Writer writer = HFile.getWriterFactory(
+ TEST_UTIL.getConfiguration()).createWriter(fout, 4000, "none", null);
int numberOfKeys = 1000;
String valueString = "Value";
@@ -59,7 +60,7 @@ public class TestReseekTo {
writer.close();
fout.close();
- HFile.Reader reader = new HFile.Reader(TEST_UTIL.getTestFileSystem(),
+ HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(),
ncTFile, null, false, false);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Wed Aug 3 19:59:48 2011
@@ -45,7 +45,8 @@ public class TestSeekTo extends HBaseTes
Path ncTFile = new Path(this.testDir, "basic.hfile");
FSDataOutputStream fout = this.fs.create(ncTFile);
int blocksize = toKV("a").getLength() * 3;
- HFile.Writer writer = new HFile.Writer(fout, blocksize, "none", null);
+ HFile.Writer writer = HFile.getWriterFactory(conf).createWriter(fout,
+ blocksize, "none", null);
// 4 bytes * 3 * 2 for each key/value +
// 3 for keys, 15 for values = 42 (woot)
writer.append(toKV("c"));
@@ -58,9 +59,10 @@ public class TestSeekTo extends HBaseTes
fout.close();
return ncTFile;
}
+
public void testSeekBefore() throws Exception {
Path p = makeNewFile();
- HFile.Reader reader = new HFile.Reader(fs, p, null, false, false);
+ HFile.Reader reader = HFile.createReader(fs, p, null, false, false);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
assertEquals(false, scanner.seekBefore(toKV("a").getKey()));
@@ -93,9 +95,9 @@ public class TestSeekTo extends HBaseTes
public void testSeekTo() throws Exception {
Path p = makeNewFile();
- HFile.Reader reader = new HFile.Reader(fs, p, null, false, false);
+ HFile.Reader reader = HFile.createReader(fs, p, null, false, false);
reader.loadFileInfo();
- assertEquals(2, reader.blockIndex.count);
+ assertEquals(2, reader.getDataBlockIndexReader().getRootBlockCount());
HFileScanner scanner = reader.getScanner(false, true);
// lies before the start of the file.
assertEquals(-1, scanner.seekTo(toKV("a").getKey()));
@@ -113,30 +115,32 @@ public class TestSeekTo extends HBaseTes
public void testBlockContainingKey() throws Exception {
Path p = makeNewFile();
- HFile.Reader reader = new HFile.Reader(fs, p, null, false, false);
+ HFile.Reader reader = HFile.createReader(fs, p, null, false, false);
reader.loadFileInfo();
- System.out.println(reader.blockIndex.toString());
+ HFileBlockIndex.BlockIndexReader blockIndexReader =
+ reader.getDataBlockIndexReader();
+ System.out.println(blockIndexReader.toString());
int klen = toKV("a").getKey().length;
// falls before the start of the file.
- assertEquals(-1, reader.blockIndex.blockContainingKey(toKV("a").getKey(),
- 0, klen));
- assertEquals(0, reader.blockIndex.blockContainingKey(toKV("c").getKey(), 0,
- klen));
- assertEquals(0, reader.blockIndex.blockContainingKey(toKV("d").getKey(), 0,
- klen));
- assertEquals(0, reader.blockIndex.blockContainingKey(toKV("e").getKey(), 0,
- klen));
- assertEquals(0, reader.blockIndex.blockContainingKey(toKV("g").getKey(), 0,
- klen));
- assertEquals(0, reader.blockIndex.blockContainingKey(toKV("h").getKey(), 0,
- klen));
- assertEquals(1, reader.blockIndex.blockContainingKey(toKV("i").getKey(), 0,
- klen));
- assertEquals(1, reader.blockIndex.blockContainingKey(toKV("j").getKey(), 0,
- klen));
- assertEquals(1, reader.blockIndex.blockContainingKey(toKV("k").getKey(), 0,
- klen));
- assertEquals(1, reader.blockIndex.blockContainingKey(toKV("l").getKey(), 0,
- klen));
- }
+ assertEquals(-1, blockIndexReader.rootBlockContainingKey(
+ toKV("a").getKey(), 0, klen));
+ assertEquals(0, blockIndexReader.rootBlockContainingKey(
+ toKV("c").getKey(), 0, klen));
+ assertEquals(0, blockIndexReader.rootBlockContainingKey(
+ toKV("d").getKey(), 0, klen));
+ assertEquals(0, blockIndexReader.rootBlockContainingKey(
+ toKV("e").getKey(), 0, klen));
+ assertEquals(0, blockIndexReader.rootBlockContainingKey(
+ toKV("g").getKey(), 0, klen));
+ assertEquals(0, blockIndexReader.rootBlockContainingKey(
+ toKV("h").getKey(), 0, klen));
+ assertEquals(1, blockIndexReader.rootBlockContainingKey(
+ toKV("i").getKey(), 0, klen));
+ assertEquals(1, blockIndexReader.rootBlockContainingKey(
+ toKV("j").getKey(), 0, klen));
+ assertEquals(1, blockIndexReader.rootBlockContainingKey(
+ toKV("k").getKey(), 0, klen));
+ assertEquals(1, blockIndexReader.rootBlockContainingKey(
+ toKV("l").getKey(), 0, klen));
+ }
}
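[Editorial sketch] The index lookups above change shape because the block index is no longer a public field on the reader. A sketch of the replacement call path, with names as they appear in this hunk:

    HFile.Reader reader = HFile.createReader(fs, p, null, false, false);
    reader.loadFileInfo();
    HFileBlockIndex.BlockIndexReader idx = reader.getDataBlockIndexReader();

    // Root-level containment query: index of the root block that would
    // hold the key, or -1 when the key sorts before the first block.
    byte[] key = toKV("c").getKey();
    int block = idx.rootBlockContainingKey(key, 0, key.length); // 0 here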
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java Wed Aug 3 19:59:48 2011
@@ -283,7 +283,7 @@ public class TestHFileOutputFormat {
FileStatus[] file = fs.listStatus(sub3[0].getPath());
// open as HFile Reader and pull out TIMERANGE FileInfo.
- HFile.Reader rd = new HFile.Reader(fs, file[0].getPath(), null, true,
+ HFile.Reader rd = HFile.createReader(fs, file[0].getPath(), null, true,
false);
Map<byte[],byte[]> finfo = rd.loadFileInfo();
byte[] range = finfo.get("TIMERANGE".getBytes());
@@ -578,6 +578,9 @@ public class TestHFileOutputFormat {
try {
// partial map red setup to get an operational writer for testing
+ // We turn off sequence file compression because DefaultCodec
+ // pollutes the GZip codec pool with an incompatible compressor.
+ conf.set("io.seqfile.compression.type", "NONE");
Job job = new Job(conf, "testLocalMRIncrementalLoad");
setupRandomGeneratorMapper(job);
HFileOutputFormat.configureIncrementalLoad(job, table);
@@ -607,7 +610,8 @@ public class TestHFileOutputFormat {
// verify that the compression on this file matches the configured
// compression
Path dataFilePath = fileSystem.listStatus(f.getPath())[0].getPath();
- Reader reader = new HFile.Reader(fileSystem, dataFilePath, null, false, true);
+ Reader reader = HFile.createReader(fileSystem, dataFilePath, null,
+ false, true);
reader.loadFileInfo();
assertEquals("Incorrect compression used for column family " + familyStr
+ "(reader: " + reader + ")",
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Wed Aug 3 19:59:48 2011
@@ -101,8 +101,8 @@ public class TestLoadIncrementalHFiles {
for (byte[][] range : hfileRanges) {
byte[] from = range[0];
byte[] to = range[1];
- createHFile(fs, new Path(familyDir, "hfile_" + hfileIdx++),
- FAMILY, QUALIFIER, from, to, 1000);
+ createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
+ + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000);
}
int expectedRows = hfileIdx * 1000;
@@ -132,7 +132,7 @@ public class TestLoadIncrementalHFiles {
FileSystem fs = util.getTestFileSystem();
Path testIn = new Path(dir, "testhfile");
HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
- createHFile(fs, testIn, FAMILY, QUALIFIER,
+ createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
Path bottomOut = new Path(dir, "bottom.out");
@@ -151,7 +151,7 @@ public class TestLoadIncrementalHFiles {
private int verifyHFile(Path p) throws IOException {
Configuration conf = util.getConfiguration();
- HFile.Reader reader = new HFile.Reader(
+ HFile.Reader reader = HFile.createReader(
p.getFileSystem(conf), p, null, false, false);
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, false);
@@ -171,11 +171,13 @@ public class TestLoadIncrementalHFiles {
* TODO put me in an HFileTestUtil or something?
*/
static void createHFile(
+ Configuration conf,
FileSystem fs, Path path,
byte[] family, byte[] qualifier,
byte[] startKey, byte[] endKey, int numRows) throws IOException
{
- HFile.Writer writer = new HFile.Writer(fs, path, BLOCKSIZE, COMPRESSION,
+ HFile.Writer writer = HFile.getWriterFactory(conf).createWriter(fs, path,
+ BLOCKSIZE, COMPRESSION,
KeyValue.KEY_COMPARATOR);
long now = System.currentTimeMillis();
try {
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java Wed Aug 3 19:59:48 2011
@@ -70,7 +70,8 @@ public class TestFSErrorsExposed {
HBaseTestingUtility.getTestDir("internalScannerExposesErrors"),
"regionname"), "familyname");
FaultyFileSystem fs = new FaultyFileSystem(util.getTestFileSystem());
- StoreFile.Writer writer = StoreFile.createWriter(fs, hfilePath, 2*1024);
+ StoreFile.Writer writer = StoreFile.createWriter(fs, hfilePath, 2*1024,
+ util.getConfiguration());
TestStoreFile.writeStoreFile(
writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
@@ -111,7 +112,8 @@ public class TestFSErrorsExposed {
HBaseTestingUtility.getTestDir("internalScannerExposesErrors"),
"regionname"), "familyname");
FaultyFileSystem fs = new FaultyFileSystem(util.getTestFileSystem());
- StoreFile.Writer writer = StoreFile.createWriter(fs, hfilePath, 2 * 1024);
+ StoreFile.Writer writer = StoreFile.createWriter(fs, hfilePath, 2 * 1024,
+ util.getConfiguration());
TestStoreFile.writeStoreFile(
writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Wed Aug 3 19:59:48 2011
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hbase.util.Byte
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
+import org.apache.hadoop.util.Progressable;
import org.mockito.Mockito;
import com.google.common.base.Joiner;
@@ -204,7 +206,7 @@ public class TestStore extends TestCase
Configuration c = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(c);
StoreFile.Writer w = StoreFile.createWriter(fs, storedir,
- StoreFile.DEFAULT_BLOCKSIZE_SMALL);
+ StoreFile.DEFAULT_BLOCKSIZE_SMALL, c);
w.appendMetadata(seqid + 1, false);
w.close();
this.store.close();
@@ -571,6 +573,14 @@ public class TestStore extends TestCase
return new FaultyOutputStream(super.create(p), faultPos);
}
+ @Override
+ public FSDataOutputStream create(Path f, FsPermission permission,
+ boolean overwrite, int bufferSize, short replication, long blockSize,
+ Progressable progress) throws IOException {
+ return new FaultyOutputStream(super.create(f, permission,
+ overwrite, bufferSize, replication, blockSize, progress), faultPos);
+ }
+
}
static class FaultyOutputStream extends FSDataOutputStream {
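[Editorial sketch] The extra create() override matters because FilterFileSystem forwards each overload directly to the wrapped filesystem: if StoreFile's writer opens files through the long-form create(), the simple-overload override alone never runs and no fault is injected. That rationale is an inference from the shape of the fix; the signatures are from the hunk above.

    FaultyFileSystem fs = new FaultyFileSystem(util.getTestFileSystem());
    // Both overloads now wrap the stream, so the fault fires either way.
    FSDataOutputStream a = fs.create(path);
    FSDataOutputStream b = fs.create(path, FsPermission.getDefault(),
        true, 4096, (short) 1, 64 * 1024 * 1024, null);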
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Wed Aug 3 19:59:48 2011
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.io.hfile.
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache.CacheStats;
+import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.mockito.Mockito;
@@ -87,7 +88,8 @@ public class TestStoreFile extends HBase
public void testBasicHalfMapFile() throws Exception {
// Make up a directory hierarchy that has a regiondir and familyname.
StoreFile.Writer writer = StoreFile.createWriter(this.fs,
- new Path(new Path(this.testDir, "regionname"), "familyname"), 2 * 1024);
+ new Path(new Path(this.testDir, "regionname"), "familyname"), 2 * 1024,
+ conf);
writeStoreFile(writer);
checkHalfHFile(new StoreFile(this.fs, writer.getPath(), true, conf,
StoreFile.BloomType.NONE, false));
@@ -127,7 +129,8 @@ public class TestStoreFile extends HBase
Path storedir = new Path(new Path(this.testDir, "regionname"), "familyname");
Path dir = new Path(storedir, "1234567890");
// Make a store file and write data to it.
- StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024);
+ StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024,
+ conf);
writeStoreFile(writer);
StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
StoreFile.BloomType.NONE, false);
@@ -197,8 +200,11 @@ public class TestStoreFile extends HBase
(topScanner.isSeeked() && topScanner.next())) {
key = topScanner.getKey();
- assertTrue(topScanner.getReader().getComparator().compare(key.array(),
- key.arrayOffset(), key.limit(), midkey, 0, midkey.length) >= 0);
+ if (topScanner.getReader().getComparator().compare(key.array(),
+ key.arrayOffset(), key.limit(), midkey, 0, midkey.length) < 0) {
+ fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
+ Bytes.toStringBinary(midkey));
+ }
if (first) {
first = false;
LOG.info("First in top: " + Bytes.toString(Bytes.toBytes(key)));
@@ -327,7 +333,8 @@ public class TestStoreFile extends HBase
private void bloomWriteRead(StoreFile.Writer writer, FileSystem fs)
throws Exception {
- float err = conf.getFloat(StoreFile.IO_STOREFILE_BLOOM_ERROR_RATE, 0);
+ float err = conf.getFloat(
+ BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);
Path f = writer.getPath();
long now = System.currentTimeMillis();
for (int i = 0; i < 2000; i += 2) {
@@ -362,25 +369,24 @@ public class TestStoreFile extends HBase
}
reader.close();
fs.delete(f, true);
- System.out.println("False negatives: " + falseNeg);
- assertEquals(0, falseNeg);
- System.out.println("False positives: " + falsePos);
- if (!(falsePos <= 2* 2000 * err)) {
- System.out.println("WTFBBQ! " + falsePos + ", " + (2* 2000 * err) );
- }
- assertTrue(falsePos <= 2* 2000 * err);
+ assertEquals("False negatives: " + falseNeg, 0, falseNeg);
+ int maxFalsePos = (int) (2 * 2000 * err);
+ assertTrue("Too many false positives: " + falsePos + " (err=" + err
+ + ", expected no more than " + maxFalsePos + ")",
+ falsePos <= maxFalsePos);
}
public void testBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
- conf.setFloat(StoreFile.IO_STOREFILE_BLOOM_ERROR_RATE, (float)0.01);
- conf.setBoolean(StoreFile.IO_STOREFILE_BLOOM_ENABLED, true);
+ conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE,
+ (float) 0.01);
+ conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
// write the file
Path f = new Path(ROOT_DIR, getName());
StoreFile.Writer writer = new StoreFile.Writer(fs, f,
StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
- conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000, false);
+ conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
bloomWriteRead(writer, fs);
}
@@ -388,8 +394,8 @@ public class TestStoreFile extends HBase
public void testBloomTypes() throws Exception {
float err = (float) 0.01;
FileSystem fs = FileSystem.getLocal(conf);
- conf.setFloat(StoreFile.IO_STOREFILE_BLOOM_ERROR_RATE, err);
- conf.setBoolean(StoreFile.IO_STOREFILE_BLOOM_ENABLED, true);
+ conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, err);
+ conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
int rowCount = 50;
int colCount = 10;
@@ -411,7 +417,7 @@ public class TestStoreFile extends HBase
StoreFile.Writer writer = new StoreFile.Writer(fs, f,
StoreFile.DEFAULT_BLOCKSIZE_SMALL,
HFile.DEFAULT_COMPRESSION_ALGORITHM,
- conf, KeyValue.COMPARATOR, bt[x], expKeys[x], false);
+ conf, KeyValue.COMPARATOR, bt[x], expKeys[x]);
long now = System.currentTimeMillis();
for (int i = 0; i < rowCount*2; i += 2) { // rows
@@ -471,19 +477,23 @@ public class TestStoreFile extends HBase
float err = (float)0.005;
FileSystem fs = FileSystem.getLocal(conf);
Path f = new Path(ROOT_DIR, getName());
- conf.setFloat(StoreFile.IO_STOREFILE_BLOOM_ERROR_RATE, err);
- conf.setBoolean(StoreFile.IO_STOREFILE_BLOOM_ENABLED, true);
- conf.setInt(StoreFile.IO_STOREFILE_BLOOM_MAX_KEYS, 1000);
+ conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, err);
+ conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
+ conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_MAX_KEYS, 1000);
+
+ // This test only runs for HFile format version 1.
+ conf.setInt(HFile.FORMAT_VERSION_KEY, 1);
// this should not create a bloom because the max keys is too small
StoreFile.Writer writer = new StoreFile.Writer(fs, f,
StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
- conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000, false);
+ conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
assertFalse(writer.hasBloom());
writer.close();
fs.delete(f, true);
- conf.setInt(StoreFile.IO_STOREFILE_BLOOM_MAX_KEYS, Integer.MAX_VALUE);
+ conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_MAX_KEYS,
+ Integer.MAX_VALUE);
// TODO: commented out because we run out of java heap space on trunk
/*
@@ -500,8 +510,7 @@ public class TestStoreFile extends HBase
// because Java can't create a contiguous array > MAX_INT
writer = new StoreFile.Writer(fs, f,
StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
- conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, Integer.MAX_VALUE,
- false);
+ conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, Integer.MAX_VALUE);
assertFalse(writer.hasBloom());
writer.close();
fs.delete(f, true);
@@ -556,7 +565,7 @@ public class TestStoreFile extends HBase
}
/**
- *Generate a list of KeyValues for testing based on given parameters
+ * Generate a list of KeyValues for testing based on given parameters
* @param timestamps
* @param numRows
* @param qualifier
@@ -592,7 +601,8 @@ public class TestStoreFile extends HBase
Path storedir = new Path(new Path(this.testDir, "regionname"),
"familyname");
Path dir = new Path(storedir, "1234567890");
- StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024);
+ StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024,
+ conf);
List<KeyValue> kvList = getKeyValueSet(timestamps,numRows,
family, qualifier);
@@ -645,7 +655,7 @@ public class TestStoreFile extends HBase
long startEvicted = cs.getEvictedCount();
// Let's write a StoreFile with three blocks, with cache on write off
- conf.setBoolean("hbase.rs.cacheblocksonwrite", false);
+ conf.setBoolean(HFile.CACHE_BLOCKS_ON_WRITE_KEY, false);
Path pathCowOff = new Path(baseDir, "123456789");
StoreFile.Writer writer = writeStoreFile(conf, pathCowOff, 3);
StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
@@ -666,7 +676,7 @@ public class TestStoreFile extends HBase
reader.close();
// Now write a StoreFile with three blocks, with cache on write on
- conf.setBoolean("hbase.rs.cacheblocksonwrite", true);
+ conf.setBoolean(HFile.CACHE_BLOCKS_ON_WRITE_KEY, true);
Path pathCowOn = new Path(baseDir, "123456788");
writer = writeStoreFile(conf, pathCowOn, 3);
hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
@@ -702,6 +712,12 @@ public class TestStoreFile extends HBase
while ((kv1 = scannerOne.next()) != null) {
kv2 = scannerTwo.next();
assertTrue(kv1.equals(kv2));
+ assertTrue(Bytes.compareTo(
+ kv1.getBuffer(), kv1.getKeyOffset(), kv1.getKeyLength(),
+ kv2.getBuffer(), kv2.getKeyOffset(), kv2.getKeyLength()) == 0);
+ assertTrue(Bytes.compareTo(
+ kv1.getBuffer(), kv1.getValueOffset(), kv1.getValueLength(),
+ kv2.getBuffer(), kv2.getValueOffset(), kv2.getValueLength()) == 0);
}
assertNull(scannerTwo.next());
assertEquals(startHit + 6, cs.getHitCount());
@@ -755,8 +771,7 @@ public class TestStoreFile extends HBase
int blockSize = totalSize / numBlocks;
StoreFile.Writer writer = new StoreFile.Writer(fs, path, blockSize,
HFile.DEFAULT_COMPRESSION_ALGORITHM,
- conf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000,
- conf.getBoolean("hbase.rs.cacheblocksonwrite", false));
+ conf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000);
// We'll write N-1 KVs to ensure we don't write an extra block
kvs.remove(kvs.size()-1);
for (KeyValue kv : kvs) {
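[Editorial sketch] Much of the churn in this file reduces to configuration renames: the Bloom filter tuning keys moved from StoreFile to BloomFilterFactory, and the string "hbase.rs.cacheblocksonwrite" is now the HFile.CACHE_BLOCKS_ON_WRITE_KEY constant. Collected in one place (constant names exactly as in the hunks above):

    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
    conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0.01f);
    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_MAX_KEYS, 1000);
    conf.setBoolean(HFile.CACHE_BLOCKS_ON_WRITE_KEY, true);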
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java Wed Aug 3 19:59:48 2011
@@ -199,7 +199,7 @@ public class TestWALReplay {
HLog wal = createWAL(this.conf);
HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf);
Path f = new Path(basedir, "hfile");
- HFile.Writer writer = new HFile.Writer(this.fs, f);
+ HFile.Writer writer = HFile.getWriterFactory(conf).createWriter(this.fs, f);
byte [] family = htd.getFamilies().iterator().next().getName();
byte [] row = Bytes.toBytes(tableNameStr);
writer.append(new KeyValue(row, family, family, row));
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java Wed Aug 3 19:59:48 2011
@@ -23,46 +23,45 @@ package org.apache.hadoop.hbase.util;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;
-import java.util.BitSet;
import junit.framework.TestCase;
public class TestByteBloomFilter extends TestCase {
-
+
public void testBasicBloom() throws Exception {
ByteBloomFilter bf1 = new ByteBloomFilter(1000, (float)0.01, Hash.MURMUR_HASH, 0);
ByteBloomFilter bf2 = new ByteBloomFilter(1000, (float)0.01, Hash.MURMUR_HASH, 0);
bf1.allocBloom();
bf2.allocBloom();
-
+
// test 1: verify no fundamental false negatives or positives
byte[] key1 = {1,2,3,4,5,6,7,8,9};
byte[] key2 = {1,2,3,4,5,6,7,8,7};
-
+
bf1.add(key1);
bf2.add(key2);
-
+
assertTrue(bf1.contains(key1));
assertFalse(bf1.contains(key2));
assertFalse(bf2.contains(key1));
assertTrue(bf2.contains(key2));
-
+
byte [] bkey = {1,2,3,4};
byte [] bval = "this is a much larger byte array".getBytes();
-
+
bf1.add(bkey);
bf1.add(bval, 1, bval.length-1);
-
+
assertTrue( bf1.contains(bkey) );
assertTrue( bf1.contains(bval, 1, bval.length-1) );
assertFalse( bf1.contains(bval) );
assertFalse( bf1.contains(bval) );
-
- // test 2: serialization & deserialization.
+
+ // test 2: serialization & deserialization.
// (convert bloom to byte array & read byte array back in as input)
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
bf1.writeBloom(new DataOutputStream(bOut));
- ByteBuffer bb = ByteBuffer.wrap(bOut.toByteArray());
+ ByteBuffer bb = ByteBuffer.wrap(bOut.toByteArray());
ByteBloomFilter newBf1 = new ByteBloomFilter(1000, (float)0.01,
Hash.MURMUR_HASH, 0);
assertTrue(newBf1.contains(key1, bb));
@@ -71,16 +70,17 @@ public class TestByteBloomFilter extends
assertTrue( newBf1.contains(bval, 1, bval.length-1, bb) );
assertFalse( newBf1.contains(bval, bb) );
assertFalse( newBf1.contains(bval, bb) );
-
+
System.out.println("Serialized as " + bOut.size() + " bytes");
assertTrue(bOut.size() - bf1.byteSize < 10); //... allow small padding
}
-
+
public void testBloomFold() throws Exception {
// test: foldFactor < log(max/actual)
- ByteBloomFilter b = new ByteBloomFilter(1003, (float)0.01, Hash.MURMUR_HASH, 2);
+ ByteBloomFilter b = new ByteBloomFilter(1003, (float) 0.01,
+ Hash.MURMUR_HASH, 2);
b.allocBloom();
- int origSize = b.getByteSize();
+ long origSize = b.getByteSize();
assertEquals(1204, origSize);
for (int i = 0; i < 12; ++i) {
b.add(Bytes.toBytes(i));
@@ -106,7 +106,7 @@ public class TestByteBloomFilter extends
ByteBloomFilter b = new ByteBloomFilter(10*1000*1000, (float)err, Hash.MURMUR_HASH, 3);
b.allocBloom();
long startTime = System.currentTimeMillis();
- int origSize = b.getByteSize();
+ long origSize = b.getByteSize();
for (int i = 0; i < 1*1000*1000; ++i) {
b.add(Bytes.toBytes(i));
}
@@ -119,12 +119,12 @@ public class TestByteBloomFilter extends
endTime = System.currentTimeMillis();
System.out.println("Total Fold time = " + (endTime - startTime) + "ms");
assertTrue(origSize >= b.getByteSize()<<3);
-
+
// test
startTime = System.currentTimeMillis();
int falsePositives = 0;
for (int i = 0; i < 2*1000*1000; ++i) {
-
+
if (b.contains(Bytes.toBytes(i))) {
if(i >= 1*1000*1000) falsePositives++;
} else {
@@ -138,4 +138,27 @@ public class TestByteBloomFilter extends
// test: foldFactor > log(max/actual)
}
+
+ public void testSizing() {
+ int bitSize = 8 * 128 * 1024; // 128 KB
+ double errorRate = 0.025; // target false positive rate
+
+ // How many keys can we store in a Bloom filter of this size maintaining
+ // the given false positive rate, not taking into account that the n
+ long maxKeys = ByteBloomFilter.idealMaxKeys(bitSize, errorRate);
+ assertEquals(136570, maxKeys);
+
+ // A reverse operation: how many bits would we need to store this many keys
+ // and keep the same low false positive rate?
+ long bitSize2 = ByteBloomFilter.computeBitSize(maxKeys, errorRate);
+
+ // The bit size comes out a little different due to rounding.
+ assertTrue(Math.abs(bitSize2 - bitSize) * 1.0 / bitSize < 1e-5);
+ }
+
+ public void testFoldableByteSize() {
+ assertEquals(128, ByteBloomFilter.computeFoldableByteSize(1000, 5));
+ assertEquals(640, ByteBloomFilter.computeFoldableByteSize(5001, 4));
+ }
+
}
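[Editorial sketch] The constants in testSizing follow from the textbook Bloom filter relations n = m(ln 2)^2 / (-ln p) and m = -n ln p / (ln 2)^2, which idealMaxKeys() and computeBitSize() presumably implement (an assumption; only the asserted values are taken from the test). Checking the arithmetic:

    long bitSize = 8 * 128 * 1024;   // m = 1,048,576 bits (128 KB)
    double errorRate = 0.025;        // p

    // n = m * (ln 2)^2 / -ln(p) = 136570.6..., truncating to 136570 --
    // the value asserted above.
    long maxKeys = (long) (bitSize * Math.pow(Math.log(2), 2)
        / -Math.log(errorRate));

    // Inverse direction: bits for n keys at rate p; rounding is why the
    // test only checks agreement to within a relative 1e-5.
    long bitSize2 = (long) Math.ceil(maxKeys * -Math.log(errorRate)
        / Math.pow(Math.log(2), 2));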
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java?rev=1153634&r1=1153633&r2=1153634&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java Wed Aug 3 19:59:48 2011
@@ -19,6 +19,10 @@
*/
package org.apache.hadoop.hbase.util;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
@@ -142,6 +146,8 @@ public class TestBytes extends TestCase
byte [] key2 = {4,9};
byte [] key2_2 = {4};
byte [] key3 = {5,11};
+ byte [] key4 = {0};
+ byte [] key5 = {2};
assertEquals(1, Bytes.binarySearch(arr, key1, 0, 1,
Bytes.BYTES_RAWCOMPARATOR));
@@ -157,8 +163,22 @@ public class TestBytes extends TestCase
Bytes.BYTES_RAWCOMPARATOR));
assertEquals(5, Bytes.binarySearch(arr, key3, 1, 1,
Bytes.BYTES_RAWCOMPARATOR));
+ assertEquals(-1,
+ Bytes.binarySearch(arr, key4, 0, 1, Bytes.BYTES_RAWCOMPARATOR));
+ assertEquals(-2,
+ Bytes.binarySearch(arr, key5, 0, 1, Bytes.BYTES_RAWCOMPARATOR));
+
+ // Search for values to the left and to the right of each item in the array.
+ for (int i = 0; i < arr.length; ++i) {
+ assertEquals(-(i + 1), Bytes.binarySearch(arr,
+ new byte[] { (byte) (arr[i][0] - 1) }, 0, 1,
+ Bytes.BYTES_RAWCOMPARATOR));
+ assertEquals(-(i + 2), Bytes.binarySearch(arr,
+ new byte[] { (byte) (arr[i][0] + 1) }, 0, 1,
+ Bytes.BYTES_RAWCOMPARATOR));
+ }
}
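[Editorial sketch] The new negative-result assertions rely on Bytes.binarySearch following the java.util.Arrays contract: a miss returns -(insertionPoint) - 1. So a probe just below arr[i] maps to -(i + 1) and one just above it to -(i + 2), provided (as the loop assumes) the neighboring values are themselves absent from the array:

    // {0} sorts before arr[0]: insertion point 0, so the result is -1.
    assertEquals(-1, Bytes.binarySearch(arr, new byte[] { 0 }, 0, 1,
        Bytes.BYTES_RAWCOMPARATOR));
    // {2} falls between the first two entries: insertion point 1 -> -2.
    assertEquals(-2, Bytes.binarySearch(arr, new byte[] { 2 }, 0, 1,
        Bytes.BYTES_RAWCOMPARATOR));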
-
+
public void testStartsWith() {
assertTrue(Bytes.startsWith(Bytes.toBytes("hello"), Bytes.toBytes("h")));
assertTrue(Bytes.startsWith(Bytes.toBytes("hello"), Bytes.toBytes("")));
@@ -202,4 +222,30 @@ public class TestBytes extends TestCase
return (Bytes.toLong(testValue) + amount) == incrementResult;
}
+
+ public void testFixedSizeString() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutputStream dos = new DataOutputStream(baos);
+ Bytes.writeStringFixedSize(dos, "Hello", 5);
+ Bytes.writeStringFixedSize(dos, "World", 18);
+ Bytes.writeStringFixedSize(dos, "", 9);
+
+ try {
+ // Use a long dash, which is three bytes in UTF-8. If the string were
+ // encoded as ISO-8859-1 instead, this test would fail.
+ Bytes.writeStringFixedSize(dos, "Too\u2013Long", 9);
+ fail("Exception expected");
+ } catch (IOException ex) {
+ assertEquals(
+ "Trying to write 10 bytes (Too\\xE2\\x80\\x93Long) into a field of " +
+ "length 9", ex.getMessage());
+ }
+
+ ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
+ DataInputStream dis = new DataInputStream(bais);
+ assertEquals("Hello", Bytes.readStringFixedSize(dis, 5));
+ assertEquals("World", Bytes.readStringFixedSize(dis, 18));
+ assertEquals("", Bytes.readStringFixedSize(dis, 9));
+ }
+
}
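[Editorial sketch] One detail worth spelling out in the fixed-size string test: U+2013 is a single char but three UTF-8 bytes, which is exactly what makes it useful for catching a wrong charset. The byte counts (standard UTF-8/Latin-1 facts, matching the exception message above):

    // 7 ASCII chars + the 3-byte dash = 10 bytes: overflows the 9-byte field.
    byte[] utf8 = "Too\u2013Long".getBytes("UTF-8");
    assert utf8.length == 10;
    // Mis-encoded as ISO-8859-1, the dash degrades to a single '?' byte,
    // giving 8 bytes -- the string would fit and the expected IOException
    // would never be thrown.
    byte[] latin1 = "Too\u2013Long".getBytes("ISO-8859-1");
    assert latin1.length == 8;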