Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2015/05/17 01:58:53 UTC

[25/50] hadoop git commit: HADOOP-11921. Enhance tests for erasure coders. Contributed by Kai Zheng

HADOOP-11921. Enhance tests for erasure coders. Contributed by Kai Zheng


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9929c01b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9929c01b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9929c01b

Branch: refs/heads/HDFS-7285
Commit: 9929c01bfb0b194cce601a158aeebed5094ab8b2
Parents: ff30164
Author: Kai Zheng <ka...@intel.com>
Authored: Thu May 7 06:07:51 2015 +0800
Committer: Jing Zhao <ji...@apache.org>
Committed: Sat May 16 15:16:05 2015 -0700

----------------------------------------------------------------------
 .../hadoop-common/CHANGES-HDFS-EC-7285.txt      |  2 +
 .../hadoop/fs/CommonConfigurationKeys.java      |  4 --
 .../apache/hadoop/io/erasurecode/ECChunk.java   | 20 ++++++--
 .../erasurecode/coder/AbstractErasureCoder.java |  6 +--
 .../io/erasurecode/coder/RSErasureDecoder.java  | 40 +--------------
 .../rawcoder/AbstractRawErasureCoder.java       | 35 +++++++++++++-
 .../rawcoder/AbstractRawErasureDecoder.java     | 51 ++++++++++++--------
 .../rawcoder/AbstractRawErasureEncoder.java     | 48 +++++++++---------
 .../erasurecode/rawcoder/RawErasureCoder.java   |  8 +--
 .../erasurecode/rawcoder/RawErasureDecoder.java | 24 +++++----
 .../io/erasurecode/rawcoder/XORRawDecoder.java  | 24 +++------
 .../io/erasurecode/rawcoder/XORRawEncoder.java  |  6 ++-
 .../hadoop/io/erasurecode/TestCoderBase.java    |  4 +-
 .../erasurecode/coder/TestRSErasureCoder.java   |  4 +-
 14 files changed, 146 insertions(+), 130 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
index 9749270..7a344a8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
@@ -44,3 +44,5 @@
     HADOOP-11818. Minor improvements for erasurecode classes. (Rakesh R via Kai Zheng)
 
     HADOOP-11841. Remove unused ecschema-def.xml files.  (szetszwo)
+
+    HADOOP-11921 Enhance tests for erasure coders. (Kai Zheng)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
index bd2a24b..3f2871b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
@@ -143,10 +143,6 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   /** Supported erasure codec classes */
   public static final String IO_ERASURECODE_CODECS_KEY = "io.erasurecode.codecs";
 
-  /** Use XOR raw coder when possible for the RS codec */
-  public static final String IO_ERASURECODE_CODEC_RS_USEXOR_KEY =
-      "io.erasurecode.codec.rs.usexor";
-
   /** Raw coder factory for the RS codec */
   public static final String IO_ERASURECODE_CODEC_RS_RAWCODER_KEY =
       "io.erasurecode.codec.rs.rawcoder";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
index 01e8f35..34dd90b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECChunk.java
@@ -58,8 +58,15 @@ public class ECChunk {
   public static ByteBuffer[] toBuffers(ECChunk[] chunks) {
     ByteBuffer[] buffers = new ByteBuffer[chunks.length];
 
+    ECChunk chunk;
     for (int i = 0; i < chunks.length; i++) {
-      buffers[i] = chunks[i].getBuffer();
+      chunk = chunks[i];
+      if (chunk == null) {
+        buffers[i] = null;
+        continue;
+      }
+
+      buffers[i] = chunk.getBuffer();
     }
 
     return buffers;
@@ -71,12 +78,19 @@ public class ECChunk {
    * @param chunks
    * @return an array of byte array
    */
-  public static byte[][] toArray(ECChunk[] chunks) {
+  public static byte[][] toArrays(ECChunk[] chunks) {
     byte[][] bytesArr = new byte[chunks.length][];
 
     ByteBuffer buffer;
+    ECChunk chunk;
     for (int i = 0; i < chunks.length; i++) {
-      buffer = chunks[i].getBuffer();
+      chunk = chunks[i];
+      if (chunk == null) {
+        bytesArr[i] = null;
+        continue;
+      }
+
+      buffer = chunk.getBuffer();
       if (buffer.hasArray()) {
         bytesArr[i] = buffer.array();
       } else {
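
The null handling above lets callers leave erased units as null slots in the
chunk array and get matching null slots back. A small usage sketch, assuming
ECChunk's ByteBuffer constructor:

    ECChunk[] chunks = new ECChunk[3];
    chunks[0] = new ECChunk(ByteBuffer.allocate(16));
    chunks[1] = null;                                 // an erased unit
    chunks[2] = new ECChunk(ByteBuffer.allocate(16));

    ByteBuffer[] buffers = ECChunk.toBuffers(chunks); // buffers[1] == null
    byte[][] arrays = ECChunk.toArrays(chunks);       // arrays[1] == null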

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
index 7403e35..d491570 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java
@@ -90,11 +90,7 @@ public abstract class AbstractErasureCoder
       throw new RuntimeException("Failed to create raw coder", e);
     }
 
-    if (fact != null) {
-      return isEncoder ? fact.createEncoder() : fact.createDecoder();
-    }
-
-    return null;
+    return isEncoder ? fact.createEncoder() : fact.createDecoder();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
index fc664a5..ec7cbb5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java
@@ -17,13 +17,11 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
-import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
 
 /**
  * Reed-Solomon erasure decoder that decodes a block group.
@@ -32,38 +30,14 @@ import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
  */
 public class RSErasureDecoder extends AbstractErasureDecoder {
   private RawErasureDecoder rsRawDecoder;
-  private RawErasureDecoder xorRawDecoder;
-  private boolean useXorWhenPossible = true;
 
   @Override
-  public void setConf(Configuration conf) {
-    super.setConf(conf);
-
-    if (conf != null) {
-      this.useXorWhenPossible = conf.getBoolean(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_USEXOR_KEY, true);
-    }
-  }
-
-    @Override
   protected ErasureCodingStep prepareDecodingStep(final ECBlockGroup blockGroup) {
 
-    RawErasureDecoder rawDecoder;
-
     ECBlock[] inputBlocks = getInputBlocks(blockGroup);
     ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
 
-    /**
-     * Optimization: according to some benchmark, when only one block is erased
-     * and to be recovering, the most simple XOR scheme can be much efficient.
-     * We will have benchmark tests to verify this opt is effect or not.
-     */
-    if (outputBlocks.length == 1 && useXorWhenPossible) {
-      rawDecoder = checkCreateXorRawDecoder();
-    } else {
-      rawDecoder = checkCreateRSRawDecoder();
-    }
-
+    RawErasureDecoder rawDecoder = checkCreateRSRawDecoder();
     return new ErasureDecodingStep(inputBlocks,
         getErasedIndexes(inputBlocks), outputBlocks, rawDecoder);
   }
@@ -81,19 +55,9 @@ public class RSErasureDecoder extends AbstractErasureDecoder {
     return rsRawDecoder;
   }
 
-  private RawErasureDecoder checkCreateXorRawDecoder() {
-    if (xorRawDecoder == null) {
-      xorRawDecoder = new XORRawDecoder();
-      xorRawDecoder.initialize(getNumDataUnits(), 1, getChunkSize());
-    }
-    return xorRawDecoder;
-  }
-
   @Override
   public void release() {
-    if (xorRawDecoder != null) {
-      xorRawDecoder.release();
-    } else if (rsRawDecoder != null) {
+    if (rsRawDecoder != null) {
       rsRawDecoder.release();
     }
   }
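
With the XOR fallback removed, prepareDecodingStep always goes through the
lazily created RS raw decoder. By analogy with the deleted
checkCreateXorRawDecoder above, checkCreateRSRawDecoder presumably looks
roughly like this (a sketch, not shown in this hunk):

    private RawErasureDecoder checkCreateRSRawDecoder() {
      if (rsRawDecoder == null) {
        rsRawDecoder = new RSRawDecoder();
        rsRawDecoder.initialize(getNumDataUnits(), getNumParityUnits(),
            getChunkSize());
      }
      return rsRawDecoder;
    }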

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
index e6f3d92..eed9035 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java
@@ -19,6 +19,9 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.conf.Configured;
 
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
 /**
  * A common class of basic facilities to be shared by encoder and decoder
  *
@@ -27,6 +30,9 @@ import org.apache.hadoop.conf.Configured;
 public abstract class AbstractRawErasureCoder
     extends Configured implements RawErasureCoder {
 
+  // Used to reset coding buffers a little faster via a bulk copy of zeros
+  protected byte[] zeroChunkBytes;
+
   private int numDataUnits;
   private int numParityUnits;
   private int chunkSize;
@@ -37,6 +43,8 @@ public abstract class AbstractRawErasureCoder
     this.numDataUnits = numDataUnits;
     this.numParityUnits = numParityUnits;
     this.chunkSize = chunkSize;
+
+    zeroChunkBytes = new byte[chunkSize]; // With ZERO by default
   }
 
   @Override
@@ -55,7 +63,7 @@ public abstract class AbstractRawErasureCoder
   }
 
   @Override
-  public boolean preferNativeBuffer() {
+  public boolean preferDirectBuffer() {
     return false;
   }
 
@@ -63,4 +71,29 @@ public abstract class AbstractRawErasureCoder
   public void release() {
     // Nothing to do by default
   }
+  /**
+   * Ensure the buffer (either input or output) is ready to read or write,
+   * filled with ZERO bytes over the full chunkSize.
+   * @param buffer
+   * @return the buffer itself
+   */
+  protected ByteBuffer resetDirectBuffer(ByteBuffer buffer) {
+    buffer.clear();
+    buffer.put(zeroChunkBytes);
+    buffer.position(0);
+
+    return buffer;
+  }
+
+  /**
+   * Ensure the buffer (either input or output) is ready to read or write,
+   * filled with ZERO bytes over the full chunkSize.
+   * @param buffer bytes array buffer
+   * @return the buffer itself
+   */
+  protected byte[] resetArrayBuffer(byte[] buffer) {
+    System.arraycopy(zeroChunkBytes, 0, buffer, 0, buffer.length);
+
+    return buffer;
+  }
 }
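
The two reset helpers let subclasses zero an output chunk with one bulk copy
instead of a per-byte loop. A hedged sketch of how a raw coder implementation
would use them (the XOR coders further down do exactly this):

    protected void doEncode(byte[][] inputs, byte[][] outputs) {
      resetArrayBuffer(outputs[0]);  // zero the output chunk via one arraycopy
      // ... coder-specific work over getChunkSize() bytes ...
    }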

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
index 4613b25..17f867a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java
@@ -32,9 +32,7 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
   @Override
   public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
                      ByteBuffer[] outputs) {
-    if (erasedIndexes.length == 0) {
-      return;
-    }
+    checkParameters(inputs, erasedIndexes, outputs);
 
     doDecode(inputs, erasedIndexes, outputs);
   }
@@ -50,9 +48,7 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
 
   @Override
   public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) {
-    if (erasedIndexes.length == 0) {
-      return;
-    }
+    checkParameters(inputs, erasedIndexes, outputs);
 
     doDecode(inputs, erasedIndexes, outputs);
   }
@@ -69,20 +65,12 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
   @Override
   public void decode(ECChunk[] inputs, int[] erasedIndexes,
                      ECChunk[] outputs) {
-    doDecode(inputs, erasedIndexes, outputs);
-  }
+    checkParameters(inputs, erasedIndexes, outputs);
 
-  /**
-   * Perform the real decoding using chunks
-   * @param inputs
-   * @param erasedIndexes
-   * @param outputs
-   */
-  protected void doDecode(ECChunk[] inputs, int[] erasedIndexes,
-                          ECChunk[] outputs) {
-    if (inputs[0].getBuffer().hasArray()) {
-      byte[][] inputBytesArr = ECChunk.toArray(inputs);
-      byte[][] outputBytesArr = ECChunk.toArray(outputs);
+    boolean hasArray = inputs[0].getBuffer().hasArray();
+    if (hasArray) {
+      byte[][] inputBytesArr = ECChunk.toArrays(inputs);
+      byte[][] outputBytesArr = ECChunk.toArrays(outputs);
       doDecode(inputBytesArr, erasedIndexes, outputBytesArr);
     } else {
       ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputs);
@@ -90,4 +78,29 @@ public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
       doDecode(inputBuffers, erasedIndexes, outputBuffers);
     }
   }
+  /**
+   * Check and validate decoding parameters, throwing an exception accordingly.
+   * The checking assumes an MDS code; other codes can override this.
+   * @param inputs
+   * @param erasedIndexes
+   * @param outputs
+   */
+  protected void checkParameters(Object[] inputs, int[] erasedIndexes,
+                                 Object[] outputs) {
+    if (inputs.length != getNumParityUnits() + getNumDataUnits()) {
+      throw new IllegalArgumentException("Invalid inputs length");
+    }
+
+    if (erasedIndexes.length != outputs.length) {
+      throw new IllegalArgumentException(
+          "erasedIndexes and outputs mismatch in length");
+    }
+
+    if (erasedIndexes.length > getNumParityUnits()) {
+      throw new IllegalArgumentException(
+          "Too many erased, not recoverable");
+    }
+
+
+  }
 }
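
The new validation replaces the old silent return on an empty erasedIndexes
array. A sketch of what it enforces, where "decoder" stands in for a (6, 3)
RS decoder and the buffers for chunkSize-sized chunks:

    ByteBuffer[] inputs = new ByteBuffer[9];   // data + parity: 6 + 3 units
    int[] erasedIndexes = new int[] {0, 4};    // two units lost
    ByteBuffer[] outputs = new ByteBuffer[2];  // one output per erased index
    decoder.decode(inputs, erasedIndexes, outputs); // length checks pass

    // Four erasures exceed the parity count and are rejected up front:
    // decoder.decode(inputs, new int[] {0, 1, 2, 3}, new ByteBuffer[4]);
    //   -> IllegalArgumentException("Too many erased, not recoverable")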

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
index 4feaf39..51695e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java
@@ -31,8 +31,7 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
 
   @Override
   public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
-    assert (inputs.length == getNumDataUnits());
-    assert (outputs.length == getNumParityUnits());
+    checkParameters(inputs, outputs);
 
     doEncode(inputs, outputs);
   }
@@ -46,8 +45,7 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
 
   @Override
   public void encode(byte[][] inputs, byte[][] outputs) {
-    assert (inputs.length == getNumDataUnits());
-    assert (outputs.length == getNumParityUnits());
+    checkParameters(inputs, outputs);
 
     doEncode(inputs, outputs);
   }
@@ -61,33 +59,37 @@ public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
 
   @Override
   public void encode(ECChunk[] inputs, ECChunk[] outputs) {
-    assert (inputs.length == getNumDataUnits());
-    assert (outputs.length == getNumParityUnits());
+    checkParameters(inputs, outputs);
 
-    doEncode(inputs, outputs);
-  }
-
-  /**
-   * Perform the real encoding work using chunks.
-   * @param inputs
-   * @param outputs
-   */
-  protected void doEncode(ECChunk[] inputs, ECChunk[] outputs) {
     /**
-     * Note callers may pass byte array, or ByteBuffer via ECChunk according
+     * Note callers may pass byte array, or direct buffer via ECChunk according
      * to how ECChunk is created. Some implementations of coder use byte array
-     * (ex: pure Java), some use native ByteBuffer (ex: ISA-L), all for the
-     * better performance.
+     * (ex: pure Java), some use direct buffer (ex: ISA-L), all for the better
+     * performance.
      */
-    if (inputs[0].getBuffer().hasArray()) {
-      byte[][] inputBytesArr = ECChunk.toArray(inputs);
-      byte[][] outputBytesArr = ECChunk.toArray(outputs);
-      doEncode(inputBytesArr, outputBytesArr);
+    boolean hasArray = inputs[0].getBuffer().hasArray();
+    if (hasArray) {
+      byte[][] inputBytesArr = ECChunk.toArrays(inputs);
+      byte[][] outputBytesArr = ECChunk.toArrays(outputs);
+      encode(inputBytesArr, outputBytesArr);
     } else {
       ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputs);
       ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputs);
-      doEncode(inputBuffers, outputBuffers);
+      encode(inputBuffers, outputBuffers);
     }
   }
 
+  /**
+   * Check and validate encoding parameters, throwing an exception accordingly.
+   * @param inputs
+   * @param outputs
+   */
+  protected void checkParameters(Object[] inputs, Object[] outputs) {
+    if (inputs.length != getNumDataUnits()) {
+      throw new IllegalArgumentException("Invalid inputs length");
+    }
+    if (outputs.length != getNumParityUnits()) {
+      throw new IllegalArgumentException("Invalid outputs length");
+    }
+  }
 }
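
The encoder-side validation is the stricter counterpart of the removed asserts:
it fails fast even when assertions are disabled at runtime. A small sketch,
again assuming a hypothetical (6, 3) coder "encoder" and a chunkSize variable:

    byte[][] inputs = new byte[6][chunkSize];   // one chunk per data unit
    byte[][] outputs = new byte[3][chunkSize];  // one chunk per parity unit
    encoder.encode(inputs, outputs);            // passes checkParameters

    // encoder.encode(new byte[5][chunkSize], outputs);
    //   -> IllegalArgumentException("Invalid inputs length")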

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
index 9af5b6c..3fb211f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
@@ -64,13 +64,13 @@ public interface RawErasureCoder extends Configurable {
   public int getChunkSize();
 
   /**
-   * Tell if native or off-heap buffer is preferred or not. It's for callers to
-   * decide how to allocate coding chunk buffers, either on heap or off heap.
-   * It will return false by default.
+   * Tell if direct buffer is preferred or not. It's for callers to
+   * decide how to allocate coding chunk buffers, using DirectByteBuffer or
+   * byte arrays. It will return false by default.
    * @return true if native buffer is preferred for performance consideration,
    * otherwise false.
    */
-  public boolean preferNativeBuffer();
+  public boolean preferDirectBuffer();
 
   /**
    * Should be called when release this coder. Good chance to release encoding

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
index 1358b7d..1807da7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
@@ -31,24 +31,30 @@ import java.nio.ByteBuffer;
 public interface RawErasureDecoder extends RawErasureCoder {
 
   /**
-   * Decode with inputs and erasedIndexes, generates outputs
-   * @param inputs
-   * @param outputs
+   * Decode with inputs and erasedIndexes, generates outputs.
+   * @param inputs inputs to read data from
+   * @param erasedIndexes indexes of erased units in the inputs array
+   * @param outputs outputs to write into for data generated according to
+   *                erasedIndexes
    */
   public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
                      ByteBuffer[] outputs);
 
   /**
-   * Decode with inputs and erasedIndexes, generates outputs
-   * @param inputs
-   * @param outputs
+   * Decode with inputs and erasedIndexes, generates outputs.
+   * @param inputs inputs to read data from
+   * @param erasedIndexes indexes of erased units in the inputs array
+   * @param outputs outputs to write into for data generated according to
+   *                erasedIndexes
    */
   public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs);
 
   /**
-   * Decode with inputs and erasedIndexes, generates outputs
-   * @param inputs
-   * @param outputs
+   * Decode with inputs and erasedIndexes, generates outputs.
+   * @param inputs inputs to read data from
+   * @param erasedIndexes indexes of erased units in the inputs array
+   * @param outputs outputs to write into for data generated according to
+   *                erasedIndexes
    */
   public void decode(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
index b6b1633..7c11681 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
@@ -27,17 +27,11 @@ public class XORRawDecoder extends AbstractRawErasureDecoder {
   @Override
   protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
                           ByteBuffer[] outputs) {
-    assert(erasedIndexes.length == outputs.length);
-    assert(erasedIndexes.length <= 1);
+    resetDirectBuffer(outputs[0]);
 
-    int bufSize = inputs[0].remaining();
+    int bufSize = getChunkSize();
     int erasedIdx = erasedIndexes[0];
 
-    // Set the output to zeros.
-    for (int j = 0; j < bufSize; j++) {
-      outputs[0].put(j, (byte) 0);
-    }
-
     // Process the inputs.
     for (int i = 0; i < inputs.length; i++) {
       // Skip the erased location.
@@ -52,19 +46,13 @@ public class XORRawDecoder extends AbstractRawErasureDecoder {
   }
 
   @Override
-  protected void doDecode(byte[][] inputs, int[] erasedIndexes,
-                          byte[][] outputs) {
-    assert(erasedIndexes.length == outputs.length);
-    assert(erasedIndexes.length <= 1);
+  protected void doDecode(byte[][] inputs,
+                          int[] erasedIndexes, byte[][] outputs) {
+    resetArrayBuffer(outputs[0]);
 
-    int bufSize = inputs[0].length;
+    int bufSize = getChunkSize();
     int erasedIdx = erasedIndexes[0];
 
-    // Set the output to zeros.
-    for (int j = 0; j < bufSize; j++) {
-      outputs[0][j] = 0;
-    }
-
     // Process the inputs.
     for (int i = 0; i < inputs.length; i++) {
       // Skip the erased location.
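
For context, XOR recovery of a single erased unit just re-XORs everything that
survived: with parity p = d0 ^ d1 ^ d2, an erased d1 comes back as
p ^ d0 ^ d2. A standalone sketch with toy values of the byte-wise loop the
decoder performs over each chunk:

    byte[] d0 = {1, 2, 3}, d2 = {7, 8, 9};
    byte[] p = {1 ^ 4 ^ 7, 2 ^ 5 ^ 8, 3 ^ 6 ^ 9}; // parity of d0, d1, d2
    byte[] recovered = new byte[3];               // will hold d1 = {4, 5, 6}
    for (int j = 0; j < recovered.length; j++) {
      recovered[j] = (byte) (d0[j] ^ d2[j] ^ p[j]);
    }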

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
index dbfab5d..c751a73 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
@@ -26,8 +26,9 @@ public class XORRawEncoder extends AbstractRawErasureEncoder {
 
   @Override
   protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
-    int bufSize = inputs[0].remaining();
+    resetDirectBuffer(outputs[0]);
 
+    int bufSize = getChunkSize();
     // Get the first buffer's data.
     for (int j = 0; j < bufSize; j++) {
       outputs[0].put(j, inputs[0].get(j));
@@ -43,8 +44,9 @@ public class XORRawEncoder extends AbstractRawErasureEncoder {
 
   @Override
   protected void doEncode(byte[][] inputs, byte[][] outputs) {
-    int bufSize = inputs[0].length;
+    resetArrayBuffer(outputs[0]);
 
+    int bufSize = getChunkSize();
     // Get the first buffer's data.
     for (int j = 0; j < bufSize; j++) {
       outputs[0][j] = inputs[0][j];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
index 22fd98d..9c88665 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
@@ -75,8 +75,8 @@ public abstract class TestCoderBase {
    */
   protected void compareAndVerify(ECChunk[] erasedChunks,
                                   ECChunk[] recoveredChunks) {
-    byte[][] erased = ECChunk.toArray(erasedChunks);
-    byte[][] recovered = ECChunk.toArray(recoveredChunks);
+    byte[][] erased = ECChunk.toArrays(erasedChunks);
+    byte[][] recovered = ECChunk.toArrays(recoveredChunks);
     boolean result = Arrays.deepEquals(erased, recovered);
     assertTrue("Decoding and comparing failed.", result);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9929c01b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
index 3507dd2..6b3e742 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
@@ -59,9 +59,9 @@ public class TestRSErasureCoder extends TestErasureCoderBase {
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
         RSRawErasureCoderFactory.class.getCanonicalName());
-    conf.setBoolean(
-        CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_USEXOR_KEY, false);
+
     prepare(conf, 10, 4, null);
+
     testCoding(true);
   }