You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2015/05/17 01:59:02 UTC
[34/50] hadoop git commit: Revert the mistaken commit of the
HADOOP-11920 patch
Revert the mistaken commit of the HADOOP-11920 patch
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ecf1f4e8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ecf1f4e8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ecf1f4e8
Branch: refs/heads/HDFS-7285
Commit: ecf1f4e84659f7e6bcb2597e1dddee69f3c0b71c
Parents: f91394f
Author: Kai Zheng <ka...@intel.com>
Authored: Thu May 7 17:02:14 2015 +0800
Committer: Jing Zhao <ji...@apache.org>
Committed: Sat May 16 15:16:06 2015 -0700
----------------------------------------------------------------------
.../hadoop-common/CHANGES-HDFS-EC-7285.txt | 2 -
.../hadoop/fs/CommonConfigurationKeys.java | 4 ++
.../apache/hadoop/io/erasurecode/ECChunk.java | 20 ++------
.../erasurecode/coder/AbstractErasureCoder.java | 6 ++-
.../io/erasurecode/coder/RSErasureDecoder.java | 40 ++++++++++++++-
.../rawcoder/AbstractRawErasureCoder.java | 35 +-------------
.../rawcoder/AbstractRawErasureDecoder.java | 51 ++++++++------------
.../rawcoder/AbstractRawErasureEncoder.java | 48 +++++++++---------
.../erasurecode/rawcoder/RawErasureCoder.java | 8 +--
.../erasurecode/rawcoder/RawErasureDecoder.java | 24 ++++-----
.../io/erasurecode/rawcoder/XORRawDecoder.java | 24 ++++++---
.../io/erasurecode/rawcoder/XORRawEncoder.java | 6 +--
.../hadoop/io/erasurecode/TestCoderBase.java | 4 +-
.../erasurecode/coder/TestRSErasureCoder.java | 4 +-
14 files changed, 130 insertions(+), 146 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
index 7a344a8..9749270 100644
--- a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
@@ -44,5 +44,3 @@
HADOOP-11818. Minor improvements for erasurecode classes. (Rakesh R via Kai Zheng)
HADOOP-11841. Remove unused ecschema-def.xml files. (szetszwo)
-
- HADOOP-11921 Enhance tests for erasure coders. (Kai Zheng)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
index 3f2871b..bd2a24b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
@@ -143,6 +143,10 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
/** Supported erasure codec classes */
public static final String IO_ERASURECODE_CODECS_KEY = "io.erasurecode.codecs";
+ /** Use XOR raw coder when possible for the RS codec */
+ public static final String IO_ERASURECODE_CODEC_RS_USEXOR_KEY =
+ "io.erasurecode.codec.rs.usexor";
+
/** Raw coder factory for the RS codec */
public static final String IO_ERASURECODE_CODEC_RS_RAWCODER_KEY =
"io.erasurecode.codec.rs.rawcoder";
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
index 34dd90b..01e8f35 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
@@ -58,15 +58,8 @@ public class ECChunk {
public static ByteBuffer[] toBuffers(ECChunk[] chunks) {
ByteBuffer[] buffers = new ByteBuffer[chunks.length];
- ECChunk chunk;
for (int i = 0; i < chunks.length; i++) {
- chunk = chunks[i];
- if (chunk == null) {
- buffers[i] = null;
- continue;
- }
-
- buffers[i] = chunk.getBuffer();
+ buffers[i] = chunks[i].getBuffer();
}
return buffers;
@@ -78,19 +71,12 @@ public class ECChunk {
* @param chunks
* @return an array of byte array
*/
- public static byte[][] toArrays(ECChunk[] chunks) {
+ public static byte[][] toArray(ECChunk[] chunks) {
byte[][] bytesArr = new byte[chunks.length][];
ByteBuffer buffer;
- ECChunk chunk;
for (int i = 0; i < chunks.length; i++) {
- chunk = chunks[i];
- if (chunk == null) {
- bytesArr[i] = null;
- continue;
- }
-
- buffer = chunk.getBuffer();
+ buffer = chunks[i].getBuffer();
if (buffer.hasArray()) {
bytesArr[i] = buffer.array();
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
index d491570..7403e35 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
@@ -90,7 +90,11 @@ public abstract class AbstractErasureCoder
throw new RuntimeException("Failed to create raw coder", e);
}
- return isEncoder ? fact.createEncoder() : fact.createDecoder();
+ if (fact != null) {
+ return isEncoder ? fact.createEncoder() : fact.createDecoder();
+ }
+
+ return null;
}
@Override
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
index ec7cbb5..fc664a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
@@ -17,11 +17,13 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
/**
* Reed-Solomon erasure decoder that decodes a block group.
@@ -30,14 +32,38 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
*/
public class RSErasureDecoder extends AbstractErasureDecoder {
private RawErasureDecoder rsRawDecoder;
+ private RawErasureDecoder xorRawDecoder;
+ private boolean useXorWhenPossible = true;
@Override
+ public void setConf(Configuration conf) {
+ super.setConf(conf);
+
+ if (conf != null) {
+ this.useXorWhenPossible = conf.getBoolean(
+ CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_USEXOR_KEY, true);
+ }
+ }
+
+ @Override
protected ErasureCodingStep prepareDecodingStep(final ECBlockGroup blockGroup) {
+ RawErasureDecoder rawDecoder;
+
ECBlock[] inputBlocks = getInputBlocks(blockGroup);
ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
- RawErasureDecoder rawDecoder = checkCreateRSRawDecoder();
+ /**
+ * Optimization: according to some benchmarks, when only one block is erased
+ * and needs to be recovered, the simple XOR scheme can be much more efficient.
+ * We will add benchmark tests to verify whether this optimization is effective.
+ */
+ if (outputBlocks.length == 1 && useXorWhenPossible) {
+ rawDecoder = checkCreateXorRawDecoder();
+ } else {
+ rawDecoder = checkCreateRSRawDecoder();
+ }
+
return new ErasureDecodingStep(inputBlocks,
getErasedIndexes(inputBlocks), outputBlocks, rawDecoder);
}
@@ -55,9 +81,19 @@ public class RSErasureDecoder extends AbstractErasureDecoder {
return rsRawDecoder;
}
+ private RawErasureDecoder checkCreateXorRawDecoder() {
+ if (xorRawDecoder == null) {
+ xorRawDecoder = new XORRawDecoder();
+ xorRawDecoder.initialize(getNumDataUnits(), 1, getChunkSize());
+ }
+ return xorRawDecoder;
+ }
+
@Override
public void release() {
- if (rsRawDecoder != null) {
+ if (xorRawDecoder != null) {
+ xorRawDecoder.release();
+ } else if (rsRawDecoder != null) {
rsRawDecoder.release();
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
index eed9035..e6f3d92 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
@@ -19,9 +19,6 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.conf.Configured;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
/**
* A common class of basic facilities to be shared by encoder and decoder
*
@@ -30,9 +27,6 @@ import java.util.Arrays;
public abstract class AbstractRawErasureCoder
extends Configured implements RawErasureCoder {
- // Hope to reset coding buffers a little faster using it
- protected byte[] zeroChunkBytes;
-
private int numDataUnits;
private int numParityUnits;
private int chunkSize;
@@ -43,8 +37,6 @@ public abstract class AbstractRawErasureCoder
this.numDataUnits = numDataUnits;
this.numParityUnits = numParityUnits;
this.chunkSize = chunkSize;
-
- zeroChunkBytes = new byte[chunkSize]; // With ZERO by default
}
@Override
@@ -63,7 +55,7 @@ public abstract class AbstractRawErasureCoder
}
@Override
- public boolean preferDirectBuffer() {
+ public boolean preferNativeBuffer() {
return false;
}
@@ -71,29 +63,4 @@ public abstract class AbstractRawErasureCoder
public void release() {
// Nothing to do by default
}
- /**
- * Ensure the buffer (either input or output) ready to read or write with ZERO
- * bytes fully in chunkSize.
- * @param buffer
- * @return the buffer itself
- */
- protected ByteBuffer resetDirectBuffer(ByteBuffer buffer) {
- buffer.clear();
- buffer.put(zeroChunkBytes);
- buffer.position(0);
-
- return buffer;
- }
-
- /**
- * Ensure the buffer (either input or output) ready to read or write with ZERO
- * bytes fully in chunkSize.
- * @param buffer bytes array buffer
- * @return the buffer itself
- */
- protected byte[] resetArrayBuffer(byte[] buffer) {
- System.arraycopy(zeroChunkBytes, 0, buffer, 0, buffer.length);
-
- return buffer;
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
index 17f867a..4613b25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
@@ -32,7 +32,9 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
@Override
public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs) {
- checkParameters(inputs, erasedIndexes, outputs);
+ if (erasedIndexes.length == 0) {
+ return;
+ }
doDecode(inputs, erasedIndexes, outputs);
}
@@ -48,7 +50,9 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
@Override
public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) {
- checkParameters(inputs, erasedIndexes, outputs);
+ if (erasedIndexes.length == 0) {
+ return;
+ }
doDecode(inputs, erasedIndexes, outputs);
}
@@ -65,42 +69,25 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
@Override
public void decode(ECChunk[] inputs, int[] erasedIndexes,
ECChunk[] outputs) {
- checkParameters(inputs, erasedIndexes, outputs);
-
- boolean hasArray = inputs[0].getBuffer().hasArray();
- if (hasArray) {
- byte[][] inputBytesArr = ECChunk.toArrays(inputs);
- byte[][] outputBytesArr = ECChunk.toArrays(outputs);
- doDecode(inputBytesArr, erasedIndexes, outputBytesArr);
- } else {
- ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputs);
- ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputs);
- doDecode(inputBuffers, erasedIndexes, outputBuffers);
- }
+ doDecode(inputs, erasedIndexes, outputs);
}
+
/**
- * Check and validate decoding parameters, throw exception accordingly. The
- * checking assumes it's a MDS code. Other code can override this.
+ * Perform the real decoding using chunks
* @param inputs
* @param erasedIndexes
* @param outputs
*/
- protected void checkParameters(Object[] inputs, int[] erasedIndexes,
- Object[] outputs) {
- if (inputs.length != getNumParityUnits() + getNumDataUnits()) {
- throw new IllegalArgumentException("Invalid inputs length");
- }
-
- if (erasedIndexes.length != outputs.length) {
- throw new IllegalArgumentException(
- "erasedIndexes and outputs mismatch in length");
- }
-
- if (erasedIndexes.length > getNumParityUnits()) {
- throw new IllegalArgumentException(
- "Too many erased, not recoverable");
+ protected void doDecode(ECChunk[] inputs, int[] erasedIndexes,
+ ECChunk[] outputs) {
+ if (inputs[0].getBuffer().hasArray()) {
+ byte[][] inputBytesArr = ECChunk.toArray(inputs);
+ byte[][] outputBytesArr = ECChunk.toArray(outputs);
+ doDecode(inputBytesArr, erasedIndexes, outputBytesArr);
+ } else {
+ ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputs);
+ ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputs);
+ doDecode(inputBuffers, erasedIndexes, outputBuffers);
}
-
-
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
index 51695e8..4feaf39 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
@@ -31,7 +31,8 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
@Override
public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
- checkParameters(inputs, outputs);
+ assert (inputs.length == getNumDataUnits());
+ assert (outputs.length == getNumParityUnits());
doEncode(inputs, outputs);
}
@@ -45,7 +46,8 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
@Override
public void encode(byte[][] inputs, byte[][] outputs) {
- checkParameters(inputs, outputs);
+ assert (inputs.length == getNumDataUnits());
+ assert (outputs.length == getNumParityUnits());
doEncode(inputs, outputs);
}
@@ -59,37 +61,33 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
@Override
public void encode(ECChunk[] inputs, ECChunk[] outputs) {
- checkParameters(inputs, outputs);
+ assert (inputs.length == getNumDataUnits());
+ assert (outputs.length == getNumParityUnits());
+ doEncode(inputs, outputs);
+ }
+
+ /**
+ * Perform the real encoding work using chunks.
+ * @param inputs
+ * @param outputs
+ */
+ protected void doEncode(ECChunk[] inputs, ECChunk[] outputs) {
/**
- * Note callers may pass byte array, or direct buffer via ECChunk according
+ * Note callers may pass byte array, or ByteBuffer via ECChunk according
* to how ECChunk is created. Some implementations of coder use byte array
- * (ex: pure Java), some use direct buffer (ex: ISA-L), all for the better
- * performance.
+ * (ex: pure Java), some use native ByteBuffer (ex: ISA-L), all for the
+ * better performance.
*/
- boolean hasArray = inputs[0].getBuffer().hasArray();
- if (hasArray) {
- byte[][] inputBytesArr = ECChunk.toArrays(inputs);
- byte[][] outputBytesArr = ECChunk.toArrays(outputs);
- encode(inputBytesArr, outputBytesArr);
+ if (inputs[0].getBuffer().hasArray()) {
+ byte[][] inputBytesArr = ECChunk.toArray(inputs);
+ byte[][] outputBytesArr = ECChunk.toArray(outputs);
+ doEncode(inputBytesArr, outputBytesArr);
} else {
ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputs);
ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputs);
- encode(inputBuffers, outputBuffers);
+ doEncode(inputBuffers, outputBuffers);
}
}
- /**
- * Check and validate decoding parameters, throw exception accordingly.
- * @param inputs
- * @param outputs
- */
- protected void checkParameters(Object[] inputs, Object[] outputs) {
- if (inputs.length != getNumDataUnits()) {
- throw new IllegalArgumentException("Invalid inputs length");
- }
- if (outputs.length != getNumParityUnits()) {
- throw new IllegalArgumentException("Invalid outputs length");
- }
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
index 3fb211f..9af5b6c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
@@ -64,13 +64,13 @@ public interface RawErasureCoder extends Configurable {
public int getChunkSize();
/**
- * Tell if direct buffer is preferred or not. It's for callers to
- * decide how to allocate coding chunk buffers, using DirectByteBuffer or
- * bytes array. It will return false by default.
+ * Tell if native or off-heap buffer is preferred or not. It's for callers to
+ * decide how to allocate coding chunk buffers, either on heap or off heap.
+ * It will return false by default.
* @return true if native buffer is preferred for performance consideration,
* otherwise false.
*/
- public boolean preferDirectBuffer();
+ public boolean preferNativeBuffer();
/**
* Should be called when release this coder. Good chance to release encoding
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
index 1807da7..1358b7d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
@@ -31,30 +31,24 @@ import java.nio.ByteBuffer;
public interface RawErasureDecoder extends RawErasureCoder {
/**
- * Decode with inputs and erasedIndexes, generates outputs.
- * @param inputs inputs to read data from
- * @param erasedIndexes indexes of erased units in the inputs array
- * @param outputs outputs to write into for data generated according to
- * erasedIndexes
+ * Decode with inputs and erasedIndexes, generates outputs
+ * @param inputs
+ * @param outputs
*/
public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs);
/**
- * Decode with inputs and erasedIndexes, generates outputs.
- * @param inputs inputs to read data from
- * @param erasedIndexes indexes of erased units in the inputs array
- * @param outputs outputs to write into for data generated according to
- * erasedIndexes
+ * Decode with inputs and erasedIndexes, generates outputs
+ * @param inputs
+ * @param outputs
*/
public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs);
/**
- * Decode with inputs and erasedIndexes, generates outputs.
- * @param inputs inputs to read data from
- * @param erasedIndexes indexes of erased units in the inputs array
- * @param outputs outputs to write into for data generated according to
- * erasedIndexes
+ * Decode with inputs and erasedIndexes, generates outputs
+ * @param inputs
+ * @param outputs
*/
public void decode(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
index 7c11681..b6b1633 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
@@ -27,11 +27,17 @@ public class XORRawDecoder extends AbstractRawErasureDecoder {
@Override
protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs) {
- resetDirectBuffer(outputs[0]);
+ assert(erasedIndexes.length == outputs.length);
+ assert(erasedIndexes.length <= 1);
- int bufSize = getChunkSize();
+ int bufSize = inputs[0].remaining();
int erasedIdx = erasedIndexes[0];
+ // Set the output to zeros.
+ for (int j = 0; j < bufSize; j++) {
+ outputs[0].put(j, (byte) 0);
+ }
+
// Process the inputs.
for (int i = 0; i < inputs.length; i++) {
// Skip the erased location.
@@ -46,13 +52,19 @@ public class XORRawDecoder extends AbstractRawErasureDecoder {
}
@Override
- protected void doDecode(byte[][] inputs,
- int[] erasedIndexes, byte[][] outputs) {
- resetArrayBuffer(outputs[0]);
+ protected void doDecode(byte[][] inputs, int[] erasedIndexes,
+ byte[][] outputs) {
+ assert(erasedIndexes.length == outputs.length);
+ assert(erasedIndexes.length <= 1);
- int bufSize = getChunkSize();
+ int bufSize = inputs[0].length;
int erasedIdx = erasedIndexes[0];
+ // Set the output to zeros.
+ for (int j = 0; j < bufSize; j++) {
+ outputs[0][j] = 0;
+ }
+
// Process the inputs.
for (int i = 0; i < inputs.length; i++) {
// Skip the erased location.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
index c751a73..dbfab5d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
@@ -26,9 +26,8 @@ public class XORRawEncoder extends AbstractRawErasureEncoder {
@Override
protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
- resetDirectBuffer(outputs[0]);
+ int bufSize = inputs[0].remaining();
- int bufSize = getChunkSize();
// Get the first buffer's data.
for (int j = 0; j < bufSize; j++) {
outputs[0].put(j, inputs[0].get(j));
@@ -44,9 +43,8 @@ public class XORRawEncoder extends AbstractRawErasureEncoder {
@Override
protected void doEncode(byte[][] inputs, byte[][] outputs) {
- resetArrayBuffer(outputs[0]);
+ int bufSize = inputs[0].length;
- int bufSize = getChunkSize();
// Get the first buffer's data.
for (int j = 0; j < bufSize; j++) {
outputs[0][j] = inputs[0][j];
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
index 9c88665..22fd98d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
@@ -75,8 +75,8 @@ public abstract class TestCoderBase {
*/
protected void compareAndVerify(ECChunk[] erasedChunks,
ECChunk[] recoveredChunks) {
- byte[][] erased = ECChunk.toArrays(erasedChunks);
- byte[][] recovered = ECChunk.toArrays(recoveredChunks);
+ byte[][] erased = ECChunk.toArray(erasedChunks);
+ byte[][] recovered = ECChunk.toArray(recoveredChunks);
boolean result = Arrays.deepEquals(erased, recovered);
assertTrue("Decoding and comparing failed.", result);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecf1f4e8/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
index 6b3e742..3507dd2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
@@ -59,9 +59,9 @@ public class TestRSErasureCoder extends TestErasureCoderBase {
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
RSRawErasureCoderFactory.class.getCanonicalName());
-
+ conf.setBoolean(
+ CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_USEXOR_KEY, false);
prepare(conf, 10, 4, null);
-
testCoding(true);
}