Posted to commits@hbase.apache.org by st...@apache.org on 2017/08/15 21:57:18 UTC
hbase git commit: HBASE-18587 Fix flaky TestFileIOEngine
Repository: hbase
Updated Branches:
refs/heads/master 2b88edfd8 -> 5280c100f
HBASE-18587 Fix flaky TestFileIOEngine
This short-circuits reads and writes with 0 length and also removes flakiness in TestFileIOEngine
Signed-off-by: Michael Stack <st...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5280c100
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5280c100
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5280c100
Branch: refs/heads/master
Commit: 5280c100ff93f65cd568ce830e088cc12a2f5585
Parents: 2b88edf
Author: Zach York <zy...@amazon.com>
Authored: Thu Aug 10 16:55:28 2017 -0700
Committer: Michael Stack <st...@apache.org>
Committed: Tue Aug 15 14:57:10 2017 -0700
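The upshot of the change: write() now returns immediately when the source buffer has nothing remaining, read() skips the file access entirely for a zero-length request, and a negative read length fails fast. A minimal caller-side sketch, assuming the patched FileIOEngine below; the class name, cache path, and capacity are hypothetical, chosen only for illustration:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.io.hfile.bucket.FileIOEngine;

    public class ZeroLengthIoSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical path and capacity; only the FileIOEngine API is from the patch.
        FileIOEngine engine = new FileIOEngine(4L * 1024 * 1024, false,
            new String[] { "/tmp/bucket.cache" });
        engine.write(ByteBuffer.allocate(0), 128); // no-op: nothing remaining in the buffer
        // engine.read(128, -1, deserializer);     // would now throw IllegalArgumentException
        engine.shutdown();
      }
    }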
----------------------------------------------------------------------
.../hbase/io/hfile/bucket/FileIOEngine.java | 23 ++--
.../hbase/io/hfile/bucket/TestFileIOEngine.java | 123 +++++++++++--------
2 files changed, 88 insertions(+), 58 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/5280c100/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index a847bfe..ab77696 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.util.StringUtils;
/**
@@ -122,15 +123,18 @@ public class FileIOEngine implements IOEngine {
@Override
public Cacheable read(long offset, int length, CacheableDeserializer<Cacheable> deserializer)
throws IOException {
+ Preconditions.checkArgument(length >= 0, "Length of read cannot be less than 0.");
ByteBuffer dstBuffer = ByteBuffer.allocate(length);
- accessFile(readAccessor, dstBuffer, offset);
- // The buffer created out of the fileChannel is formed by copying the data from the file.
- // Hence in this case there is no shared memory that we point to. Even if the BucketCache evicts
- // this buffer from the file, the data is already copied and there is no need to ensure that
- // the results are not corrupted before consuming them.
- if (dstBuffer.limit() != length) {
- throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " + length
- + " expected");
+ if (length != 0) {
+ accessFile(readAccessor, dstBuffer, offset);
+ // The buffer created out of the fileChannel is formed by copying the data from the file.
+ // Hence in this case there is no shared memory that we point to. Even if the BucketCache evicts
+ // this buffer from the file, the data is already copied and there is no need to ensure that
+ // the results are not corrupted before consuming them.
+ if (dstBuffer.limit() != length) {
+ throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " + length
+ + " expected");
+ }
}
return deserializer.deserialize(new SingleByteBuff(dstBuffer), true, MemoryType.EXCLUSIVE);
}
@@ -143,6 +147,9 @@ public class FileIOEngine implements IOEngine {
*/
@Override
public void write(ByteBuffer srcBuffer, long offset) throws IOException {
+ if (!srcBuffer.hasRemaining()) {
+ return;
+ }
accessFile(writeAccessor, srcBuffer, offset);
}
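One design note on the hunk above: the write guard checks srcBuffer.hasRemaining() rather than capacity, so a buffer whose position has already advanced to its limit is skipped as well, not just an empty allocation. A standalone sketch of the distinction (not part of the patch):

    import java.nio.ByteBuffer;

    public class HasRemainingSketch {
      public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);  // non-zero capacity
        buf.position(buf.limit());                // fully consumed
        System.out.println(buf.hasRemaining());   // false -> the patched write() is a no-op
      }
    }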
http://git-wip-us.apache.org/repos/asf/hbase/blob/5280c100/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index d13022d..4451c0c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.BufferGrab
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -38,62 +41,82 @@ import org.junit.experimental.categories.Category;
*/
@Category({IOTests.class, SmallTests.class})
public class TestFileIOEngine {
- @Test
- public void testFileIOEngine() throws IOException {
- long totalCapacity = 6 * 1024 * 1024; // 6 MB
- String[] filePaths = { "testFileIOEngine1", "testFileIOEngine2",
- "testFileIOEngine3" };
- long sizePerFile = totalCapacity / filePaths.length; // 2 MB per File
- List<Long> boundaryStartPositions = new ArrayList<Long>();
+
+ private static final long TOTAL_CAPACITY = 6 * 1024 * 1024; // 6 MB
+ private static final String[] FILE_PATHS = {"testFileIOEngine1", "testFileIOEngine2",
+ "testFileIOEngine3"};
+ private static final long SIZE_PER_FILE = TOTAL_CAPACITY / FILE_PATHS.length; // 2 MB per File
+ private static final List<Long> boundaryStartPositions = new ArrayList<Long>();
+ private static final List<Long> boundaryStopPositions = new ArrayList<Long>();
+
+ private FileIOEngine fileIOEngine;
+
+ static {
boundaryStartPositions.add(0L);
- for (int i = 1; i < filePaths.length; i++) {
- boundaryStartPositions.add(sizePerFile * i - 1);
- boundaryStartPositions.add(sizePerFile * i);
- boundaryStartPositions.add(sizePerFile * i + 1);
+ for (int i = 1; i < FILE_PATHS.length; i++) {
+ boundaryStartPositions.add(SIZE_PER_FILE * i - 1);
+ boundaryStartPositions.add(SIZE_PER_FILE * i);
+ boundaryStartPositions.add(SIZE_PER_FILE * i + 1);
}
- List<Long> boundaryStopPositions = new ArrayList<Long>();
- for (int i = 1; i < filePaths.length; i++) {
- boundaryStopPositions.add(sizePerFile * i - 1);
- boundaryStopPositions.add(sizePerFile * i);
- boundaryStopPositions.add(sizePerFile * i + 1);
+ for (int i = 1; i < FILE_PATHS.length; i++) {
+ boundaryStopPositions.add(SIZE_PER_FILE * i - 1);
+ boundaryStopPositions.add(SIZE_PER_FILE * i);
+ boundaryStopPositions.add(SIZE_PER_FILE * i + 1);
+ }
+ boundaryStopPositions.add(SIZE_PER_FILE * FILE_PATHS.length - 1);
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ fileIOEngine = new FileIOEngine(TOTAL_CAPACITY, false, FILE_PATHS);
+ }
+
+ @After
+ public void cleanUp() {
+ fileIOEngine.shutdown();
+ for (String filePath : FILE_PATHS) {
+ File file = new File(filePath);
+ if (file.exists()) {
+ file.delete();
+ }
}
- boundaryStopPositions.add(sizePerFile * filePaths.length - 1);
- FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, false, filePaths);
- try {
- for (int i = 0; i < 500; i++) {
- int len = (int) Math.floor(Math.random() * 100);
- long offset = (long) Math.floor(Math.random() * totalCapacity % (totalCapacity - len));
- if (i < boundaryStartPositions.size()) {
- // make the boundary start position
- offset = boundaryStartPositions.get(i);
- } else if ((i - boundaryStartPositions.size()) < boundaryStopPositions.size()) {
- // make the boundary stop position
- offset = boundaryStopPositions.get(i - boundaryStartPositions.size()) - len + 1;
- } else if (i % 2 == 0) {
- // make the block cross file boundaries when writing/reading
- offset = Math.max(1, i % filePaths.length) * sizePerFile - len / 2;
- }
- byte[] data1 = new byte[len];
- for (int j = 0; j < data1.length; ++j) {
- data1[j] = (byte) (Math.random() * 255);
- }
- fileIOEngine.write(ByteBuffer.wrap(data1), offset);
- BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
- fileIOEngine.read(offset, len, deserializer);
- ByteBuff data2 = deserializer.getDeserializedByteBuff();
- for (int j = 0; j < data1.length; ++j) {
- assertTrue(data1[j] == data2.get(j));
- }
+ }
+
+ @Test
+ public void testFileIOEngine() throws IOException {
+ for (int i = 0; i < 500; i++) {
+ int len = (int) Math.floor(Math.random() * 100) + 1;
+ long offset = (long) Math.floor(Math.random() * TOTAL_CAPACITY % (TOTAL_CAPACITY - len));
+ if (i < boundaryStartPositions.size()) {
+ // make the boundary start position
+ offset = boundaryStartPositions.get(i);
+ } else if ((i - boundaryStartPositions.size()) < boundaryStopPositions.size()) {
+ // make the boundary stop position
+ offset = boundaryStopPositions.get(i - boundaryStartPositions.size()) - len + 1;
+ } else if (i % 2 == 0) {
+ // make the block cross file boundaries when writing/reading
+ offset = Math.max(1, i % FILE_PATHS.length) * SIZE_PER_FILE - len / 2;
}
- } finally {
- fileIOEngine.shutdown();
- for (String filePath : filePaths) {
- File file = new File(filePath);
- if (file.exists()) {
- file.delete();
- }
+ byte[] data1 = new byte[len];
+ for (int j = 0; j < data1.length; ++j) {
+ data1[j] = (byte) (Math.random() * 255);
}
+ fileIOEngine.write(ByteBuffer.wrap(data1), offset);
+ BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
+ fileIOEngine.read(offset, len, deserializer);
+ ByteBuff data2 = deserializer.getDeserializedByteBuff();
+ assertArrayEquals(data1, data2.array());
}
+ }
+
+ @Test
+ public void testFileIOEngineHandlesZeroLengthInput() throws IOException {
+ byte[] data1 = new byte[0];
+ fileIOEngine.write(ByteBuffer.wrap(data1), 0);
+ BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
+ fileIOEngine.read(0, 0, deserializer);
+ ByteBuff data2 = deserializer.getDeserializedByteBuff();
+ assertArrayEquals(data1, data2.array());
}
}
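For reference, here is the boundary arithmetic the refactored test bakes into its static initializer, worked through with the constants above (three files, 2 MB each). A small standalone sketch; the class name is mine, and the numbers follow directly from SIZE_PER_FILE:

    public class BoundaryMathSketch {
      public static void main(String[] args) {
        long sizePerFile = 2L * 1024 * 1024;  // 2,097,152 bytes, matching SIZE_PER_FILE
        // The test probes one byte before, at, and one byte after each
        // internal file boundary (between files 1-2 and 2-3):
        for (int i = 1; i < 3; i++) {
          System.out.printf("%d %d %d%n",
              sizePerFile * i - 1, sizePerFile * i, sizePerFile * i + 1);
        }
        // 2097151 2097152 2097153
        // 4194303 4194304 4194305
        // The stop list also includes the last valid offset in the cache:
        System.out.println(sizePerFile * 3 - 1);  // 6291455
      }
    }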