hadoop-common-commits mailing list archives

From aengin...@apache.org
Subject [30/31] hadoop git commit: HADOOP-13010. Refactor raw erasure coders. Contributed by Kai Zheng
Date Fri, 27 May 2016 04:40:04 GMT
http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
index 5b9e0e9..d7f78ab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.CoderUtil;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.GF256;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
@@ -34,7 +34,7 @@ import java.util.Arrays;
  * from HDFS-RAID, and also compatible with the native/ISA-L coder.
  */
 @InterfaceAudience.Private
-public class RSRawDecoder extends AbstractRawErasureDecoder {
+public class RSRawDecoder extends RawErasureDecoder {
   //relevant to schema and won't change during decode calls
   private byte[] encodeMatrix;
 
@@ -54,52 +54,54 @@ public class RSRawDecoder extends AbstractRawErasureDecoder {
   private int numErasedDataUnits;
   private boolean[] erasureFlags;
 
-  public RSRawDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
+  public RSRawDecoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+
+    int numAllUnits = getNumAllUnits();
+    if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
       throw new HadoopIllegalArgumentException(
               "Invalid getNumDataUnits() and numParityUnits");
     }
 
-    int numAllUnits = getNumDataUnits() + numParityUnits;
     encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
     RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
-    if (isAllowingVerboseDump()) {
-      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
+    if (allowVerboseDump()) {
+      DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), numAllUnits);
     }
   }
 
   @Override
-  protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
-                          ByteBuffer[] outputs) {
-    prepareDecoding(inputs, erasedIndexes);
+  protected void doDecode(ByteBufferDecodingState decodingState) {
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.decodeLength);
+    prepareDecoding(decodingState.inputs, decodingState.erasedIndexes);
 
     ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
     for (int i = 0; i < getNumDataUnits(); i++) {
-      realInputs[i] = inputs[validIndexes[i]];
+      realInputs[i] = decodingState.inputs[validIndexes[i]];
     }
-    RSUtil.encodeData(gfTables, realInputs, outputs);
+    RSUtil.encodeData(gfTables, realInputs, decodingState.outputs);
   }
 
   @Override
-  protected void doDecode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, int[] erasedIndexes,
-                          byte[][] outputs, int[] outputOffsets) {
-    prepareDecoding(inputs, erasedIndexes);
+  protected void doDecode(ByteArrayDecodingState decodingState) {
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.outputOffsets, dataLen);
+    prepareDecoding(decodingState.inputs, decodingState.erasedIndexes);
 
     byte[][] realInputs = new byte[getNumDataUnits()][];
     int[] realInputOffsets = new int[getNumDataUnits()];
     for (int i = 0; i < getNumDataUnits(); i++) {
-      realInputs[i] = inputs[validIndexes[i]];
-      realInputOffsets[i] = inputOffsets[validIndexes[i]];
+      realInputs[i] = decodingState.inputs[validIndexes[i]];
+      realInputOffsets[i] = decodingState.inputOffsets[validIndexes[i]];
     }
     RSUtil.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
-            outputs, outputOffsets);
+        decodingState.outputs, decodingState.outputOffsets);
   }
 
   private <T> void prepareDecoding(T[] inputs, int[] erasedIndexes) {
-    int[] tmpValidIndexes = new int[getNumDataUnits()];
-    CoderUtil.makeValidIndexes(inputs, tmpValidIndexes);
+    int[] tmpValidIndexes = CoderUtil.getValidIndexes(inputs);
     if (Arrays.equals(this.cachedErasedIndexes, erasedIndexes) &&
         Arrays.equals(this.validIndexes, tmpValidIndexes)) {
       return; // Optimization. Nothing to do
@@ -132,7 +134,7 @@ public class RSRawDecoder extends AbstractRawErasureDecoder {
 
     RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
         decodeMatrix, 0, gfTables);
-    if (isAllowingVerboseDump()) {
+    if (allowVerboseDump()) {
       System.out.println(DumpUtil.bytesToHex(gfTables, -1));
     }
   }
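
For reviewers tracking the API change, a minimal usage sketch of the refactored decoder; it assumes ErasureCoderOptions offers a (numDataUnits, numParityUnits) constructor, which this excerpt references but does not show:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;

public class RsDecodeSketch {
  public static void main(String[] args) {
    // RS(6,3): six data units plus three parity units, nine in total.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RSRawDecoder decoder = new RSRawDecoder(options);

    int cellSize = 1024;
    // inputs holds all nine units in order; null marks a unit that is
    // erased (or deliberately not read).
    byte[][] inputs = new byte[9][];
    for (int i = 0; i < 9; i++) {
      inputs[i] = new byte[cellSize];        // real cell data in practice
    }
    inputs[2] = null;                        // unit 2 is lost
    int[] erasedIndexes = {2};               // ask for unit 2 back
    byte[][] outputs = {new byte[cellSize]}; // one buffer per erasure

    decoder.decode(inputs, erasedIndexes, outputs);
    // outputs[0] now holds the reconstructed unit 2.
    decoder.release();
  }
}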

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
index 7c8fa59..0183760 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.CoderUtil;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
 
 import java.nio.ByteBuffer;
@@ -34,7 +34,7 @@ import java.nio.ByteBuffer;
  * addressed in HADOOP-11871.
  */
 @InterfaceAudience.Private
-public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
+public class RSRawDecoderLegacy extends RawErasureDecoder {
   // To describe and calculate the needed Vandermonde matrix
   private int[] errSignature;
   private int[] primitivePower;
@@ -61,16 +61,16 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
   private ByteBuffer[] adjustedDirectBufferOutputsParameter =
       new ByteBuffer[getNumParityUnits()];
 
-  public RSRawDecoderLegacy(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
+  public RSRawDecoderLegacy(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+    if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
       throw new HadoopIllegalArgumentException(
               "Invalid numDataUnits and numParityUnits");
     }
 
-    this.errSignature = new int[numParityUnits];
-    this.primitivePower = RSUtil.getPrimitivePower(numDataUnits,
-        numParityUnits);
+    this.errSignature = new int[getNumParityUnits()];
+    this.primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
+        getNumParityUnits());
   }
 
   @Override
@@ -129,16 +129,18 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
   }
 
   @Override
-  protected void doDecode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, int[] erasedIndexes,
-                          byte[][] outputs, int[] outputOffsets) {
+  protected void doDecode(ByteArrayDecodingState decodingState) {
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.outputOffsets, dataLen);
+
     /**
      * As passed parameters are friendly to callers but not to the underlying
      * implementations, so we have to adjust them before calling doDecodeImpl.
      */
 
     int[] erasedOrNotToReadIndexes =
-        CoderUtil.getErasedOrNotToReadIndexes(inputs);
+        CoderUtil.getNullIndexes(decodingState.inputs);
 
     // Prepare for adjustedOutputsParameter
 
@@ -148,16 +150,18 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
       adjustedOutputOffsets[i] = 0;
     }
     // Use the caller passed buffers in erasedIndexes positions
-    for (int outputIdx = 0, i = 0; i < erasedIndexes.length; i++) {
+    for (int outputIdx = 0, i = 0;
+         i < decodingState.erasedIndexes.length; i++) {
       boolean found = false;
       for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
         // If this index is one requested by the caller via erasedIndexes, then
         // we use the passed output buffer to avoid copying data thereafter.
-        if (erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
+        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
           found = true;
-          adjustedByteArrayOutputsParameter[j] = resetBuffer(
-                  outputs[outputIdx], outputOffsets[outputIdx], dataLen);
-          adjustedOutputOffsets[j] = outputOffsets[outputIdx];
+          adjustedByteArrayOutputsParameter[j] = CoderUtil.resetBuffer(
+              decodingState.outputs[outputIdx],
+              decodingState.outputOffsets[outputIdx], dataLen);
+          adjustedOutputOffsets[j] = decodingState.outputOffsets[outputIdx];
           outputIdx++;
         }
       }
@@ -169,22 +173,22 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
     // Use shared buffers for other positions (not set yet)
     for (int bufferIdx = 0, i = 0; i < erasedOrNotToReadIndexes.length; i++) {
       if (adjustedByteArrayOutputsParameter[i] == null) {
-        adjustedByteArrayOutputsParameter[i] = resetBuffer(
+        adjustedByteArrayOutputsParameter[i] = CoderUtil.resetBuffer(
             checkGetBytesArrayBuffer(bufferIdx, dataLen), 0, dataLen);
         adjustedOutputOffsets[i] = 0; // Always 0 for such temp output
         bufferIdx++;
       }
     }
 
-    doDecodeImpl(inputs, inputOffsets, dataLen, erasedOrNotToReadIndexes,
+    doDecodeImpl(decodingState.inputs, decodingState.inputOffsets,
+        dataLen, erasedOrNotToReadIndexes,
         adjustedByteArrayOutputsParameter, adjustedOutputOffsets);
   }
 
   @Override
-  protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
-                          ByteBuffer[] outputs) {
-    ByteBuffer validInput = CoderUtil.findFirstValidInput(inputs);
-    int dataLen = validInput.remaining();
+  protected void doDecode(ByteBufferDecodingState decodingState) {
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs, dataLen);
 
     /**
      * As passed parameters are friendly to callers but not to the underlying
@@ -192,7 +196,7 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
      */
 
     int[] erasedOrNotToReadIndexes =
-        CoderUtil.getErasedOrNotToReadIndexes(inputs);
+        CoderUtil.getNullIndexes(decodingState.inputs);
 
     // Prepare for adjustedDirectBufferOutputsParameter
 
@@ -201,15 +205,16 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
       adjustedDirectBufferOutputsParameter[i] = null;
     }
     // Use the caller passed buffers in erasedIndexes positions
-    for (int outputIdx = 0, i = 0; i < erasedIndexes.length; i++) {
+    for (int outputIdx = 0, i = 0;
+         i < decodingState.erasedIndexes.length; i++) {
       boolean found = false;
       for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
         // If this index is one requested by the caller via erasedIndexes, then
         // we use the passed output buffer to avoid copying data thereafter.
-        if (erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
+        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
           found = true;
-          adjustedDirectBufferOutputsParameter[j] =
-              resetBuffer(outputs[outputIdx++], dataLen);
+          adjustedDirectBufferOutputsParameter[j] = CoderUtil.resetBuffer(
+              decodingState.outputs[outputIdx++], dataLen);
         }
       }
       if (!found) {
@@ -223,12 +228,13 @@ public class RSRawDecoderLegacy extends AbstractRawErasureDecoder {
         ByteBuffer buffer = checkGetDirectBuffer(bufferIdx, dataLen);
         buffer.position(0);
         buffer.limit(dataLen);
-        adjustedDirectBufferOutputsParameter[i] = resetBuffer(buffer, dataLen);
+        adjustedDirectBufferOutputsParameter[i] =
+            CoderUtil.resetBuffer(buffer, dataLen);
         bufferIdx++;
       }
     }
 
-    doDecodeImpl(inputs, erasedOrNotToReadIndexes,
+    doDecodeImpl(decodingState.inputs, erasedOrNotToReadIndexes,
         adjustedDirectBufferOutputsParameter);
   }
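
The legacy decoder now obtains every null input position via CoderUtil.getNullIndexes (renamed from getErasedOrNotToReadIndexes); positions the caller did not list in erasedIndexes are backed by shared temp buffers. A self-contained sketch of the assumed semantics, with a local stand-in for the helper since CoderUtil itself is not shown in this hunk:

import java.util.Arrays;

public class NullIndexSketch {
  // Local stand-in mirroring the assumed CoderUtil.getNullIndexes
  // semantics: report the position of every null entry.
  static int[] getNullIndexes(Object[] inputs) {
    int[] tmp = new int[inputs.length];
    int count = 0;
    for (int i = 0; i < inputs.length; i++) {
      if (inputs[i] == null) {
        tmp[count++] = i;
      }
    }
    return Arrays.copyOf(tmp, count);
  }

  public static void main(String[] args) {
    byte[][] inputs = new byte[6][];
    inputs[0] = new byte[16];   // read
    inputs[2] = new byte[16];   // read; units 1 and 3 left null
    inputs[4] = new byte[16];
    inputs[5] = new byte[16];

    // The decoder computes over all null slots (1 and 3) even if the
    // caller's erasedIndexes only lists {1}; the extra position is
    // backed by a shared temp buffer rather than a caller buffer.
    System.out.println(Arrays.toString(getNullIndexes(inputs))); // [1, 3]
  }
}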
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
index cee6574..fd82363 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
@@ -19,11 +19,10 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
 
-import java.nio.ByteBuffer;
-
 /**
  * A raw erasure encoder in RS code scheme in pure Java in case native one
  * isn't available in some environment. Please always use native implementations
@@ -31,7 +30,7 @@ import java.nio.ByteBuffer;
  * from HDFS-RAID, and also compatible with the native/ISA-L coder.
  */
 @InterfaceAudience.Private
-public class RSRawEncoder extends AbstractRawErasureEncoder {
+public class RSRawEncoder extends RawErasureEncoder {
   // relevant to schema and won't change during encode calls.
   private byte[] encodeMatrix;
   /**
@@ -40,36 +39,42 @@ public class RSRawEncoder extends AbstractRawErasureEncoder {
    */
   private byte[] gfTables;
 
-  public RSRawEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
+  public RSRawEncoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
 
-    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
+    if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
       throw new HadoopIllegalArgumentException(
           "Invalid numDataUnits and numParityUnits");
     }
 
-    encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
-    RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
-    if (isAllowingVerboseDump()) {
-      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
+    encodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
+    RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), getNumDataUnits());
+    if (allowVerboseDump()) {
+      DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), getNumAllUnits());
     }
-    gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
-    RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
-        numDataUnits * numDataUnits, gfTables);
-    if (isAllowingVerboseDump()) {
+    gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
+    RSUtil.initTables(getNumDataUnits(), getNumParityUnits(), encodeMatrix,
+        getNumDataUnits() * getNumDataUnits(), gfTables);
+    if (allowVerboseDump()) {
       System.out.println(DumpUtil.bytesToHex(gfTables, -1));
     }
   }
 
   @Override
-  protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
-    RSUtil.encodeData(gfTables, inputs, outputs);
+  protected void doEncode(ByteBufferEncodingState encodingState) {
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.encodeLength);
+    RSUtil.encodeData(gfTables, encodingState.inputs, encodingState.outputs);
   }
 
   @Override
-  protected void doEncode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, byte[][] outputs, int[] outputOffsets) {
-    RSUtil.encodeData(gfTables, dataLen, inputs, inputOffsets, outputs,
-        outputOffsets);
+  protected void doEncode(ByteArrayEncodingState encodingState) {
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.outputOffsets,
+        encodingState.encodeLength);
+    RSUtil.encodeData(gfTables, encodingState.encodeLength,
+        encodingState.inputs,
+        encodingState.inputOffsets, encodingState.outputs,
+        encodingState.outputOffsets);
   }
 }
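
Both doEncode overloads now clear the outputs up front through CoderUtil.resetOutputBuffers, so callers no longer pre-zero the parity buffers. A minimal encode sketch against the new API, under the same ErasureCoderOptions constructor assumption as above:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;

public class RsEncodeSketch {
  public static void main(String[] args) {
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RSRawEncoder encoder = new RSRawEncoder(options);

    int cellSize = 1024;
    byte[][] data = new byte[6][cellSize];   // k data units (inputs)
    byte[][] parity = new byte[3][cellSize]; // m parity units (outputs)
    // ... fill the data cells ...

    // doEncode now calls CoderUtil.resetOutputBuffers itself, so the
    // parity buffers need no zeroing by the caller.
    encoder.encode(data, parity);
    encoder.release();
  }
}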

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
index 8057720..ed1c83b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
 
 import java.nio.ByteBuffer;
@@ -29,20 +30,20 @@ import java.util.Arrays;
  * when possible.
  */
 @InterfaceAudience.Private
-public class RSRawEncoderLegacy extends AbstractRawErasureEncoder {
+public class RSRawEncoderLegacy extends RawErasureEncoder {
   private int[] generatingPolynomial;
 
-  public RSRawEncoderLegacy(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
+  public RSRawEncoderLegacy(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
 
     assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
 
-    int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,
-        numParityUnits);
+    int[] primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
+        getNumParityUnits());
     // compute generating polynomial
     int[] gen = {1};
     int[] poly = new int[2];
-    for (int i = 0; i < numParityUnits; i++) {
+    for (int i = 0; i < getNumParityUnits(); i++) {
       poly[0] = primitivePower[i];
       poly[1] = 1;
       gen = RSUtil.GF.multiply(gen, poly);
@@ -52,15 +53,21 @@ public class RSRawEncoderLegacy extends AbstractRawErasureEncoder {
   }
 
   @Override
-  protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
+  protected void doEncode(ByteBufferEncodingState encodingState) {
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.encodeLength);
     // parity units + data units
-    ByteBuffer[] all = new ByteBuffer[outputs.length + inputs.length];
+    ByteBuffer[] all = new ByteBuffer[encodingState.outputs.length +
+        encodingState.inputs.length];
 
-    if (isAllowingChangeInputs()) {
-      System.arraycopy(outputs, 0, all, 0, outputs.length);
-      System.arraycopy(inputs, 0, all, outputs.length, inputs.length);
+    if (allowChangeInputs()) {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.inputs, 0, all,
+          encodingState.outputs.length, encodingState.inputs.length);
     } else {
-      System.arraycopy(outputs, 0, all, 0, outputs.length);
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
 
       /**
        * Note when this coder would be really (rarely) used in a production
@@ -68,11 +75,11 @@ public class RSRawEncoderLegacy extends AbstractRawErasureEncoder {
        * buffers avoiding reallocating.
        */
       ByteBuffer tmp;
-      for (int i = 0; i < inputs.length; i++) {
-        tmp = ByteBuffer.allocate(inputs[i].remaining());
-        tmp.put(inputs[i]);
+      for (int i = 0; i < encodingState.inputs.length; i++) {
+        tmp = ByteBuffer.allocate(encodingState.inputs[i].remaining());
+        tmp.put(encodingState.inputs[i]);
         tmp.flip();
-        all[outputs.length + i] = tmp;
+        all[encodingState.outputs.length + i] = tmp;
       }
     }
 
@@ -81,27 +88,38 @@ public class RSRawEncoderLegacy extends AbstractRawErasureEncoder {
   }
 
   @Override
-  protected void doEncode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, byte[][] outputs,
-                          int[] outputOffsets) {
+  protected void doEncode(ByteArrayEncodingState encodingState) {
+    int dataLen = encodingState.encodeLength;
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.outputOffsets, dataLen);
     // parity units + data units
-    byte[][] all = new byte[outputs.length + inputs.length][];
-    int[] allOffsets = new int[outputOffsets.length + inputOffsets.length];
+    byte[][] all = new byte[encodingState.outputs.length +
+        encodingState.inputs.length][];
+    int[] allOffsets = new int[encodingState.outputOffsets.length +
+        encodingState.inputOffsets.length];
 
-    if (isAllowingChangeInputs()) {
-      System.arraycopy(outputs, 0, all, 0, outputs.length);
-      System.arraycopy(inputs, 0, all, outputs.length, inputs.length);
+    if (allowChangeInputs()) {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.inputs, 0, all,
+          encodingState.outputs.length, encodingState.inputs.length);
 
-      System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);
-      System.arraycopy(inputOffsets, 0, allOffsets,
-          outputOffsets.length, inputOffsets.length);
+      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
+          encodingState.outputOffsets.length);
+      System.arraycopy(encodingState.inputOffsets, 0, allOffsets,
+          encodingState.outputOffsets.length,
+          encodingState.inputOffsets.length);
     } else {
-      System.arraycopy(outputs, 0, all, 0, outputs.length);
-      System.arraycopy(outputOffsets, 0, allOffsets, 0, outputOffsets.length);
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
+          encodingState.outputOffsets.length);
 
-      for (int i = 0; i < inputs.length; i++) {
-        all[outputs.length + i] = Arrays.copyOfRange(inputs[i],
-            inputOffsets[i], inputOffsets[i] + dataLen);
+      for (int i = 0; i < encodingState.inputs.length; i++) {
+        all[encodingState.outputs.length + i] =
+            Arrays.copyOfRange(encodingState.inputs[i],
+            encodingState.inputOffsets[i],
+                encodingState.inputOffsets[i] + dataLen);
       }
     }
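
isAllowingChangeInputs() becomes allowChangeInputs(), delegating to the options object: when true the legacy encoder reuses the caller's arrays in place, when false it defensively copies every input with Arrays.copyOfRange. A sketch of opting in, assuming ErasureCoderOptions has a four-argument (numDataUnits, numParityUnits, allowChangeInputs, allowVerboseDump) constructor, which this excerpt does not show:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoderLegacy;

public class ChangeInputsSketch {
  public static void main(String[] args) {
    // With allowChangeInputs=true the legacy encoder skips the defensive
    // copy of every input, at the price of letting the coder scribble
    // on the caller's buffers.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3, true, false);
    RSRawEncoderLegacy encoder = new RSRawEncoderLegacy(options);

    byte[][] data = new byte[6][1024];
    byte[][] parity = new byte[3][1024];
    encoder.encode(data, parity); // data[] contents may be clobbered here
  }
}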
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
index b38db4b..8d954d5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * A raw coder factory for the new raw Reed-Solomon coder in Java.
@@ -26,12 +27,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 public class RSRawErasureCoderFactory implements RawErasureCoderFactory {
 
   @Override
-  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
-    return new RSRawEncoder(numDataUnits, numParityUnits);
+  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
+    return new RSRawEncoder(coderOptions);
   }
 
   @Override
-  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
-    return new RSRawDecoder(numDataUnits, numParityUnits);
+  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
+    return new RSRawDecoder(coderOptions);
   }
 }
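
With factories now taking ErasureCoderOptions, a single options object configures a matching encoder/decoder pair. A usage sketch, not part of the patch:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public class FactorySketch {
  public static void main(String[] args) {
    // One options object configures the pair, so encoder and decoder
    // cannot disagree on the schema.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RawErasureCoderFactory factory = new RSRawErasureCoderFactory();
    RawErasureEncoder encoder = factory.createEncoder(options);
    RawErasureDecoder decoder = factory.createDecoder(options);
    // ... encode/decode, then release() both ...
  }
}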

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
index 5aa75e4..f0ebb3b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * A raw coder factory for the legacy raw Reed-Solomon coder in Java.
@@ -26,12 +27,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 public class RSRawErasureCoderFactoryLegacy implements RawErasureCoderFactory {
 
   @Override
-  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
-    return new RSRawEncoderLegacy(numDataUnits, numParityUnits);
+  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
+    return new RSRawEncoderLegacy(coderOptions);
   }
 
   @Override
-  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
-    return new RSRawDecoderLegacy(numDataUnits, numParityUnits);
+  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
+    return new RSRawDecoderLegacy(coderOptions);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
deleted file mode 100644
index 20a1a69..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configurable;
-
-/**
- * RawErasureCoder is a common interface for {@link RawErasureEncoder} and
- * {@link RawErasureDecoder} as both encoder and decoder share some properties.
- *
- * RawErasureCoder is part of ErasureCodec framework, where ErasureCoder is
- * used to encode/decode a group of blocks (BlockGroup) according to the codec
- * specific BlockGroup layout and logic. An ErasureCoder extracts chunks of
- * data from the blocks and can employ various low level RawErasureCoders to
- * perform encoding/decoding against the chunks.
- *
- * To distinguish from ErasureCoder, here RawErasureCoder is used to mean the
- * low level constructs, since it only takes care of the math calculation with
- * a group of byte buffers.
- */
-@InterfaceAudience.Private
-public interface RawErasureCoder extends Configurable {
-
-  /**
-   * Get a coder option value.
-   * @param option
-   * @return option value
-   */
-  public Object getCoderOption(CoderOption option);
-
-  /**
-   * Set a coder option value.
-   * @param option
-   * @param value
-   */
-  public void setCoderOption(CoderOption option, Object value);
-
-  /**
-   * The number of data input units for the coding. A unit can be a byte,
-   * chunk or buffer or even a block.
-   * @return count of data input units
-   */
-  public int getNumDataUnits();
-
-  /**
-   * The number of parity output units for the coding. A unit can be a byte,
-   * chunk, buffer or even a block.
-   * @return count of parity output units
-   */
-  public int getNumParityUnits();
-
-  /**
-   * Should be called when release this coder. Good chance to release encoding
-   * or decoding buffers
-   */
-  public void release();
-}
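
The removed RawErasureCoder interface's accessors (getNumDataUnits, getNumParityUnits, release) move into the RawErasureEncoder and RawErasureDecoder base classes below, while the mutable getCoderOption/setCoderOption pair gives way to an immutable ErasureCoderOptions fixed at construction. Roughly, the migration looks like the sketch below; the CoderOption constant name is recalled from the pre-patch code base rather than shown in this excerpt:

// Before HADOOP-13010: two-int constructor plus mutable coder options.
RawErasureEncoder oldStyle = new RSRawEncoder(6, 3);
oldStyle.setCoderOption(CoderOption.ALLOW_CHANGE_INPUTS, true);

// After: schema and tuning bundled into one immutable options object
// (assumed four-argument ErasureCoderOptions constructor).
RawErasureEncoder newStyle =
    new RSRawEncoder(new ErasureCoderOptions(6, 3, true, false));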

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
index 8a12106..6d94f00 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * Raw erasure coder factory that can be used to create raw encoder and decoder.
@@ -29,17 +30,15 @@ public interface RawErasureCoderFactory {
 
   /**
    * Create raw erasure encoder.
-   * @param numDataUnits number of data units in a coding group
-   * @param numParityUnits number of parity units in a coding group
+   * @param coderOptions the options used to create the encoder
    * @return raw erasure encoder
    */
-  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits);
+  RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions);
 
   /**
    * Create raw erasure decoder.
-   * @param numDataUnits number of data units in a coding group
-   * @param numParityUnits number of parity units in a coding group
+   * @param coderOptions the options used to create the decoder
    * @return raw erasure decoder
    */
-  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits);
+  RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions);
 }
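
Implementing the slimmed-down factory contract is now two one-liners. A hypothetical factory, wired to the XOR coders later in this patch:

package org.apache.hadoop.io.erasurecode.rawcoder;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;

/**
 * Hypothetical third-party factory showing the new single-argument
 * contract, reusing the XOR coders from further down in this patch.
 */
@InterfaceAudience.Private
public class MyXorCoderFactory implements RawErasureCoderFactory {

  @Override
  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
    return new XORRawEncoder(coderOptions);
  }

  @Override
  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
    return new XORRawDecoder(coderOptions);
  }
}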

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
index 1707650..a29b472 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
@@ -19,18 +19,34 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 import java.nio.ByteBuffer;
 
 /**
- * RawErasureDecoder performs decoding given chunks of input data and generates
- * missing data that corresponds to an erasure code scheme, like XOR and
- * Reed-Solomon.
+ * An abstract raw erasure decoder that's to be inherited by new decoders.
  *
- * It extends the {@link RawErasureCoder} interface.
+ * Raw erasure coder is part of erasure codec framework, where erasure coder is
+ * used to encode/decode a group of blocks (BlockGroup) according to the codec
+ * specific BlockGroup layout and logic. An erasure coder extracts chunks of
+ * data from the blocks and can employ various low level raw erasure coders to
+ * perform encoding/decoding against the chunks.
+ *
+ * To distinguish from erasure coder, here raw erasure coder is used to mean the
+ * low level constructs, since it only takes care of the math calculation with
+ * a group of byte buffers.
+ *
+ * Note it mainly provides decode() calls, which should be stateless and may be
+ * made thread-safe in future.
  */
 @InterfaceAudience.Private
-public interface RawErasureDecoder extends RawErasureCoder {
+public abstract class RawErasureDecoder {
+
+  private final ErasureCoderOptions coderOptions;
+
+  public RawErasureDecoder(ErasureCoderOptions coderOptions) {
+    this.coderOptions = coderOptions;
+  }
 
   /**
    * Decode with inputs and erasedIndexes, generates outputs.
@@ -64,8 +80,44 @@ public interface RawErasureDecoder extends RawErasureCoder {
    * @param outputs output buffers to put decoded data into according to
    *                erasedIndexes, ready for read after the call
    */
-  void decode(ByteBuffer[] inputs, int[] erasedIndexes,
-                     ByteBuffer[] outputs);
+  public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
+                     ByteBuffer[] outputs) {
+    ByteBufferDecodingState decodingState = new ByteBufferDecodingState(this,
+        inputs, erasedIndexes, outputs);
+
+    boolean usingDirectBuffer = decodingState.usingDirectBuffer;
+    int dataLen = decodingState.decodeLength;
+    if (dataLen == 0) {
+      return;
+    }
+
+    int[] inputPositions = new int[inputs.length];
+    for (int i = 0; i < inputPositions.length; i++) {
+      if (inputs[i] != null) {
+        inputPositions[i] = inputs[i].position();
+      }
+    }
+
+    if (usingDirectBuffer) {
+      doDecode(decodingState);
+    } else {
+      ByteArrayDecodingState badState = decodingState.convertToByteArrayState();
+      doDecode(badState);
+    }
+
+    for (int i = 0; i < inputs.length; i++) {
+      if (inputs[i] != null) {
+        // dataLen bytes consumed
+        inputs[i].position(inputPositions[i] + dataLen);
+      }
+    }
+  }
+
+  /**
+   * Perform the real decoding using Direct ByteBuffer.
+   * @param decodingState the decoding state
+   */
+  protected abstract void doDecode(ByteBufferDecodingState decodingState);
 
   /**
    * Decode with inputs and erasedIndexes, generates outputs. More see above.
@@ -75,7 +127,23 @@ public interface RawErasureDecoder extends RawErasureCoder {
    * @param outputs output buffers to put decoded data into according to
    *                erasedIndexes, ready for read after the call
    */
-  void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs);
+  public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) {
+    ByteArrayDecodingState decodingState = new ByteArrayDecodingState(this,
+        inputs, erasedIndexes, outputs);
+
+    if (decodingState.decodeLength == 0) {
+      return;
+    }
+
+    doDecode(decodingState);
+  }
+
+  /**
+   * Perform the real decoding using bytes array, supporting offsets and
+   * lengths.
+   * @param decodingState the decoding state
+   */
+  protected abstract void doDecode(ByteArrayDecodingState decodingState);
 
   /**
    * Decode with inputs and erasedIndexes, generates outputs. More see above.
@@ -88,6 +156,57 @@ public interface RawErasureDecoder extends RawErasureCoder {
    * @param outputs output buffers to put decoded data into according to
    *                erasedIndexes, ready for read after the call
    */
-  void decode(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs);
+  public void decode(ECChunk[] inputs, int[] erasedIndexes,
+                     ECChunk[] outputs) {
+    ByteBuffer[] newInputs = CoderUtil.toBuffers(inputs);
+    ByteBuffer[] newOutputs = CoderUtil.toBuffers(outputs);
+    decode(newInputs, erasedIndexes, newOutputs);
+  }
+
+  public int getNumDataUnits() {
+    return coderOptions.getNumDataUnits();
+  }
+
+  public int getNumParityUnits() {
+    return coderOptions.getNumParityUnits();
+  }
+
+  protected int getNumAllUnits() {
+    return coderOptions.getNumAllUnits();
+  }
+
+  /**
+   * Tell if direct buffer is preferred or not. It's for callers to
+   * decide how to allocate coding chunk buffers, using DirectByteBuffer or
+   * bytes array. It will return false by default.
+   * @return true if native buffer is preferred for performance consideration,
+   * otherwise false.
+   */
+  public boolean preferDirectBuffer() {
+    return false;
+  }
 
+  /**
+   * Allow changing the input buffers while performing encoding/decoding.
+   * @return true if it's allowed to change inputs, false otherwise
+   */
+  public boolean allowChangeInputs() {
+    return coderOptions.allowChangeInputs();
+  }
+
+  /**
+   * Allow dumping verbose info during encoding/decoding.
+   * @return true if it's allowed to do verbose dump, false otherwise.
+   */
+  public boolean allowVerboseDump() {
+    return coderOptions.allowVerboseDump();
+  }
+
+  /**
+   * Should be called when releasing this coder. A good chance to release
+   * encoding or decoding buffers.
+   */
+  public void release() {
+    // Nothing to do here.
+  }
 }
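
RawErasureDecoder is now a template: the public decode() entry points validate buffers, pick the direct-buffer or byte-array path, and restore input positions, while subclasses supply only the two doDecode overloads. As an illustration, a hypothetical decoder for a trivial replication code (every parity unit is a copy of the single data unit); it assumes CoderUtil exposes findFirstValidInput and getValidIndexes helpers, the latter visible earlier in this patch:

package org.apache.hadoop.io.erasurecode.rawcoder;

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;

import java.nio.ByteBuffer;

/** Hypothetical decoder for a replication code: any surviving unit
 *  restores an erased one, since all units carry identical bytes. */
public class ReplicationRawDecoder extends RawErasureDecoder {

  public ReplicationRawDecoder(ErasureCoderOptions coderOptions) {
    super(coderOptions);
  }

  @Override
  protected void doDecode(ByteBufferDecodingState decodingState) {
    ByteBuffer source =
        CoderUtil.findFirstValidInput(decodingState.inputs);
    for (ByteBuffer output : decodingState.outputs) {
      // duplicate() so the shared source position isn't consumed.
      output.put(source.duplicate());
    }
  }

  @Override
  protected void doDecode(ByteArrayDecodingState decodingState) {
    int dataLen = decodingState.decodeLength;
    int first = CoderUtil.getValidIndexes(decodingState.inputs)[0];
    for (int i = 0; i < decodingState.outputs.length; i++) {
      // Full-length copy, so no explicit resetOutputBuffers is needed.
      System.arraycopy(decodingState.inputs[first],
          decodingState.inputOffsets[first],
          decodingState.outputs[i], decodingState.outputOffsets[i], dataLen);
    }
  }
}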

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
index 6303d82..36d68f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
@@ -19,18 +19,34 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 import java.nio.ByteBuffer;
 
 /**
- * RawErasureEncoder performs encoding given chunks of input data and generates
- * parity outputs that corresponds to an erasure code scheme, like XOR and
- * Reed-Solomon.
+ * An abstract raw erasure encoder that's to be inherited by new encoders.
  *
- * It extends the {@link RawErasureCoder} interface.
+ * Raw erasure coder is part of erasure codec framework, where erasure coder is
+ * used to encode/decode a group of blocks (BlockGroup) according to the codec
+ * specific BlockGroup layout and logic. An erasure coder extracts chunks of
+ * data from the blocks and can employ various low level raw erasure coders to
+ * perform encoding/decoding against the chunks.
+ *
+ * To distinguish from erasure coder, here raw erasure coder is used to mean the
+ * low level constructs, since it only takes care of the math calculation with
+ * a group of byte buffers.
+ *
+ * Note it mainly provides encode() calls, which should be stateless and may be
+ * made thread-safe in future.
  */
 @InterfaceAudience.Private
-public interface RawErasureEncoder extends RawErasureCoder {
+public abstract class RawErasureEncoder {
+
+  private final ErasureCoderOptions coderOptions;
+
+  public RawErasureEncoder(ErasureCoderOptions coderOptions) {
+    this.coderOptions = coderOptions;
+  }
 
   /**
    * Encode with inputs and generates outputs.
@@ -47,7 +63,43 @@ public interface RawErasureEncoder extends RawErasureCoder {
    * @param outputs output buffers to put the encoded data into, ready to read
    *                after the call
    */
-  void encode(ByteBuffer[] inputs, ByteBuffer[] outputs);
+  public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
+    ByteBufferEncodingState bbeState = new ByteBufferEncodingState(
+        this, inputs, outputs);
+
+    boolean usingDirectBuffer = bbeState.usingDirectBuffer;
+    int dataLen = bbeState.encodeLength;
+    if (dataLen == 0) {
+      return;
+    }
+
+    int[] inputPositions = new int[inputs.length];
+    for (int i = 0; i < inputPositions.length; i++) {
+      if (inputs[i] != null) {
+        inputPositions[i] = inputs[i].position();
+      }
+    }
+
+    if (usingDirectBuffer) {
+      doEncode(bbeState);
+    } else {
+      ByteArrayEncodingState baeState = bbeState.convertToByteArrayState();
+      doEncode(baeState);
+    }
+
+    for (int i = 0; i < inputs.length; i++) {
+      if (inputs[i] != null) {
+        // dataLen bytes consumed
+        inputs[i].position(inputPositions[i] + dataLen);
+      }
+    }
+  }
+
+  /**
+   * Perform the real encoding work using direct ByteBuffer.
+   * @param encodingState the encoding state
+   */
+  protected abstract void doEncode(ByteBufferEncodingState encodingState);
 
   /**
    * Encode with inputs and generates outputs. More see above.
@@ -56,7 +108,24 @@ public interface RawErasureEncoder extends RawErasureCoder {
   * @param outputs output buffers to put the encoded data into, ready to read
    *                after the call
    */
-  void encode(byte[][] inputs, byte[][] outputs);
+  public void encode(byte[][] inputs, byte[][] outputs) {
+    ByteArrayEncodingState baeState = new ByteArrayEncodingState(
+        this, inputs, outputs);
+
+    int dataLen = baeState.encodeLength;
+    if (dataLen == 0) {
+      return;
+    }
+
+    doEncode(baeState);
+  }
+
+  /**
+   * Perform the real encoding work using bytes array, supporting offsets
+   * and lengths.
+   * @param encodingState the encoding state
+   */
+  protected abstract void doEncode(ByteArrayEncodingState encodingState);
 
   /**
    * Encode with inputs and generates outputs. More see above.
@@ -65,6 +134,56 @@ public interface RawErasureEncoder extends RawErasureCoder {
   * @param outputs output buffers to put the encoded data into, ready to read
    *                after the call
    */
-  void encode(ECChunk[] inputs, ECChunk[] outputs);
+  public void encode(ECChunk[] inputs, ECChunk[] outputs) {
+    ByteBuffer[] newInputs = ECChunk.toBuffers(inputs);
+    ByteBuffer[] newOutputs = ECChunk.toBuffers(outputs);
+    encode(newInputs, newOutputs);
+  }
+
+  public int getNumDataUnits() {
+    return coderOptions.getNumDataUnits();
+  }
+
+  public int getNumParityUnits() {
+    return coderOptions.getNumParityUnits();
+  }
+
+  public int getNumAllUnits() {
+    return coderOptions.getNumAllUnits();
+  }
+
+  /**
+   * Tell if direct buffer is preferred or not. It's for callers to
+   * decide how to allocate coding chunk buffers, using DirectByteBuffer or
+   * bytes array. It will return false by default.
+   * @return true if native buffer is preferred for performance consideration,
+   * otherwise false.
+   */
+  public boolean preferDirectBuffer() {
+    return false;
+  }
 
+  /**
+   * Allow changing the input buffers while performing encoding/decoding.
+   * @return true if it's allowed to change inputs, false otherwise
+   */
+  public boolean allowChangeInputs() {
+    return coderOptions.allowChangeInputs();
+  }
+
+  /**
+   * Allow dumping verbose info during encoding/decoding.
+   * @return true if it's allowed to do verbose dump, false otherwise.
+   */
+  public boolean allowVerboseDump() {
+    return coderOptions.allowVerboseDump();
+  }
+
+  /**
+   * Should be called when releasing this coder. A good chance to release
+   * encoding or decoding buffers.
+   */
+  public void release() {
+    // Nothing to do here.
+  }
 }
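
One behavioral detail of encode(ByteBuffer[], ByteBuffer[]) worth noting: input positions are snapshotted before encoding and advanced by encodeLength afterwards, so callers observe the inputs as consumed whichever path ran. A small demonstration, usage sketch only:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.XORRawEncoder;

import java.nio.ByteBuffer;

public class PositionSketch {
  public static void main(String[] args) {
    XORRawEncoder encoder =
        new XORRawEncoder(new ErasureCoderOptions(2, 1));

    ByteBuffer[] inputs = {
        ByteBuffer.allocateDirect(8), ByteBuffer.allocateDirect(8)};
    ByteBuffer[] outputs = {ByteBuffer.allocateDirect(8)};
    for (ByteBuffer in : inputs) {
      in.put(new byte[8]).flip(); // fill, then make ready for read
    }

    encoder.encode(inputs, outputs);

    // encode() snapshotted each input position and advanced it by
    // encodeLength, so the inputs read as fully consumed:
    System.out.println(inputs[0].remaining()); // 0
  }
}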

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
index 61017dd..ef7b172 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import java.nio.ByteBuffer;
-
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+import java.nio.ByteBuffer;
 
 /**
  * A raw decoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
@@ -29,55 +30,57 @@ import org.apache.hadoop.classification.InterfaceAudience;
  * deployed independently.
  */
 @InterfaceAudience.Private
-public class XORRawDecoder extends AbstractRawErasureDecoder {
+public class XORRawDecoder extends RawErasureDecoder {
 
-  public XORRawDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
+  public XORRawDecoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
   }
 
   @Override
-  protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
-                          ByteBuffer[] outputs) {
-    ByteBuffer output = outputs[0];
+  protected void doDecode(ByteBufferDecodingState decodingState) {
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.decodeLength);
+    ByteBuffer output = decodingState.outputs[0];
 
-    int erasedIdx = erasedIndexes[0];
+    int erasedIdx = decodingState.erasedIndexes[0];
 
     // Process the inputs.
     int iIdx, oIdx;
-    for (int i = 0; i < inputs.length; i++) {
+    for (int i = 0; i < decodingState.inputs.length; i++) {
       // Skip the erased location.
       if (i == erasedIdx) {
         continue;
       }
 
-      for (iIdx = inputs[i].position(), oIdx = output.position();
-           iIdx < inputs[i].limit();
+      for (iIdx = decodingState.inputs[i].position(), oIdx = output.position();
+           iIdx < decodingState.inputs[i].limit();
            iIdx++, oIdx++) {
-        output.put(oIdx, (byte) (output.get(oIdx) ^ inputs[i].get(iIdx)));
+        output.put(oIdx, (byte) (output.get(oIdx) ^
+            decodingState.inputs[i].get(iIdx)));
       }
     }
   }
 
   @Override
-  protected void doDecode(byte[][] inputs, int[] inputOffsets, int dataLen,
-                          int[] erasedIndexes, byte[][] outputs,
-                          int[] outputOffsets) {
-    byte[] output = outputs[0];
-    resetBuffer(output, outputOffsets[0], dataLen);
-
-    int erasedIdx = erasedIndexes[0];
+  protected void doDecode(ByteArrayDecodingState decodingState) {
+    byte[] output = decodingState.outputs[0];
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.outputOffsets, dataLen);
+    int erasedIdx = decodingState.erasedIndexes[0];
 
     // Process the inputs.
     int iIdx, oIdx;
-    for (int i = 0; i < inputs.length; i++) {
+    for (int i = 0; i < decodingState.inputs.length; i++) {
       // Skip the erased location.
       if (i == erasedIdx) {
         continue;
       }
 
-      for (iIdx = inputOffsets[i], oIdx = outputOffsets[0];
-           iIdx < inputOffsets[i] + dataLen; iIdx++, oIdx++) {
-        output[oIdx] ^= inputs[i][iIdx];
+      for (iIdx = decodingState.inputOffsets[i],
+               oIdx = decodingState.outputOffsets[0];
+           iIdx < decodingState.inputOffsets[i] + dataLen; iIdx++, oIdx++) {
+        output[oIdx] ^= decodingState.inputs[i][iIdx];
       }
     }
   }
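
The XOR decoder recovers exactly one erasure (only erasedIndexes[0] is read): the lost unit is the XOR of all surviving units, data and parity alike, since XOR is its own inverse. In miniature:

public class XorIdentity {
  public static void main(String[] args) {
    byte d0 = 0x35, d1 = 0x5A, d2 = 0x0F;
    byte parity = (byte) (d0 ^ d1 ^ d2);      // what the encoder stores
    byte rebuilt = (byte) (d0 ^ d2 ^ parity); // decoder's per-byte work
    System.out.println(rebuilt == d1);        // true
  }
}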

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
index 646fc17..409ba9d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import java.nio.ByteBuffer;
-
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+import java.nio.ByteBuffer;
 
 /**
  * A raw encoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
@@ -29,50 +30,56 @@ import org.apache.hadoop.classification.InterfaceAudience;
  * deployed independently.
  */
 @InterfaceAudience.Private
-public class XORRawEncoder extends AbstractRawErasureEncoder {
+public class XORRawEncoder extends RawErasureEncoder {
 
-  public XORRawEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
+  public XORRawEncoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
   }
 
-  protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
-    ByteBuffer output = outputs[0];
+  protected void doEncode(ByteBufferEncodingState encodingState) {
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.encodeLength);
+    ByteBuffer output = encodingState.outputs[0];
 
     // Get the first buffer's data.
     int iIdx, oIdx;
-    for (iIdx = inputs[0].position(), oIdx = output.position();
-         iIdx < inputs[0].limit(); iIdx++, oIdx++) {
-      output.put(oIdx, inputs[0].get(iIdx));
+    for (iIdx = encodingState.inputs[0].position(), oIdx = output.position();
+         iIdx < encodingState.inputs[0].limit(); iIdx++, oIdx++) {
+      output.put(oIdx, encodingState.inputs[0].get(iIdx));
     }
 
     // XOR with everything else.
-    for (int i = 1; i < inputs.length; i++) {
-      for (iIdx = inputs[i].position(), oIdx = output.position();
-           iIdx < inputs[i].limit();
+    for (int i = 1; i < encodingState.inputs.length; i++) {
+      for (iIdx = encodingState.inputs[i].position(), oIdx = output.position();
+           iIdx < encodingState.inputs[i].limit();
            iIdx++, oIdx++) {
-        output.put(oIdx, (byte) (output.get(oIdx) ^ inputs[i].get(iIdx)));
+        output.put(oIdx, (byte) (output.get(oIdx) ^
+            encodingState.inputs[i].get(iIdx)));
       }
     }
   }
 
   @Override
-  protected void doEncode(byte[][] inputs, int[] inputOffsets, int dataLen,
-                          byte[][] outputs, int[] outputOffsets) {
-    byte[] output = outputs[0];
-    resetBuffer(output, outputOffsets[0], dataLen);
+  protected void doEncode(ByteArrayEncodingState encodingState) {
+    int dataLen = encodingState.encodeLength;
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.outputOffsets, dataLen);
+    byte[] output = encodingState.outputs[0];
 
     // Get the first buffer's data.
     int iIdx, oIdx;
-    for (iIdx = inputOffsets[0], oIdx = outputOffsets[0];
-         iIdx < inputOffsets[0] + dataLen; iIdx++, oIdx++) {
-      output[oIdx] = inputs[0][iIdx];
+    for (iIdx = encodingState.inputOffsets[0],
+             oIdx = encodingState.outputOffsets[0];
+         iIdx < encodingState.inputOffsets[0] + dataLen; iIdx++, oIdx++) {
+      output[oIdx] = encodingState.inputs[0][iIdx];
     }
 
     // XOR with everything else.
-    for (int i = 1; i < inputs.length; i++) {
-      for (iIdx = inputOffsets[i], oIdx = outputOffsets[0];
-           iIdx < inputOffsets[i] + dataLen; iIdx++, oIdx++) {
-        output[oIdx] ^= inputs[i][iIdx];
+    for (int i = 1; i < encodingState.inputs.length; i++) {
+      for (iIdx = encodingState.inputOffsets[i],
+               oIdx = encodingState.outputOffsets[0];
+           iIdx < encodingState.inputOffsets[i] + dataLen; iIdx++, oIdx++) {
+        output[oIdx] ^= encodingState.inputs[i][iIdx];
       }
     }
   }
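
Putting the refactored XOR pair together: encode three data units into one parity unit, drop a unit, and recover it (again assuming the two-argument ErasureCoderOptions constructor). A round-trip sketch:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.XORRawEncoder;

import java.util.Arrays;

public class XorRoundTrip {
  public static void main(String[] args) {
    ErasureCoderOptions options = new ErasureCoderOptions(3, 1);
    XORRawEncoder encoder = new XORRawEncoder(options);
    XORRawDecoder decoder = new XORRawDecoder(options);

    byte[][] data = {{1, 2}, {3, 4}, {5, 6}};
    byte[][] parity = {new byte[2]};
    encoder.encode(data, parity);

    // Lose data unit 1. Decode inputs span all units in order,
    // with null marking the erased one.
    byte[][] inputs = {data[0], null, data[2], parity[0]};
    byte[][] recovered = {new byte[2]};
    decoder.decode(inputs, new int[]{1}, recovered);

    System.out.println(Arrays.equals(recovered[0], new byte[]{3, 4})); // true
  }
}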

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java
index 3123690..571fe12 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * A raw coder factory for raw XOR coder.
@@ -26,12 +27,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 public class XORRawErasureCoderFactory implements RawErasureCoderFactory {
 
   @Override
-  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
-    return new XORRawEncoder(numDataUnits, numParityUnits);
+  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
+    return new XORRawEncoder(coderOptions);
   }
 
   @Override
-  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
-    return new XORRawDecoder(numDataUnits, numParityUnits);
+  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
+    return new XORRawDecoder(coderOptions);
   }
 }
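
With this change a factory takes a single ErasureCoderOptions instead of the
two unit counts. A hedged usage sketch (the 2+1 geometry is an arbitrary
example, not something this patch prescribes):

  ErasureCoderOptions options = new ErasureCoderOptions(2, 1); // 2 data, 1 parity
  RawErasureCoderFactory factory = new XORRawErasureCoderFactory();
  RawErasureEncoder encoder = factory.createEncoder(options);
  RawErasureDecoder decoder = factory.createDecoder(options);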

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/package-info.java
new file mode 100644
index 0000000..034cdf2
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/package-info.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *
+ * Raw erasure coders.
+ *
+ * Raw erasure coder is part of erasure codec framework, where erasure coder is
+ * used to encode/decode a group of blocks (BlockGroup) according to the codec
+ * specific BlockGroup layout and logic. An erasure coder extracts chunks of
+ * data from the blocks and can employ various low level raw erasure coders to
+ * perform encoding/decoding against the chunks.
+ *
+ * To distinguish from erasure coder, here raw erasure coder is used to mean the
+ * low level constructs, since it only takes care of the math calculation with
+ * a group of byte buffers.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
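
To make the package description concrete: a raw coder only does the math over
a group of buffers, so a full round trip fits in a few lines. A sketch
assuming the byte[][] encode/decode overloads exercised elsewhere in this
patch (e.g. StripedFileTestUtil below); the XOR coder, the 2+1 geometry, and
the sample bytes are arbitrary:

  ErasureCoderOptions options = new ErasureCoderOptions(2, 1);
  RawErasureEncoder encoder = new XORRawEncoder(options);
  RawErasureDecoder decoder = new XORRawDecoder(options);

  byte[][] data = {{1, 2}, {3, 4}};  // two data units
  byte[][] parity = {new byte[2]};   // one parity unit
  encoder.encode(data, parity);      // parity[0][j] = data[0][j] ^ data[1][j]

  // Erase data unit 0; a null input marks the erased unit.
  byte[][] inputs = {null, data[1], parity[0]};
  byte[][] recovered = {new byte[2]};
  decoder.decode(inputs, new int[]{0}, recovered);  // recovered[0] == {1, 2}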

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/CoderUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/CoderUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/CoderUtil.java
deleted file mode 100644
index 07d15be..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/CoderUtil.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder.util;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.classification.InterfaceAudience;
-
-import java.util.Arrays;
-
-/**
- * Helpful utilities for implementing some raw erasure coders.
- */
-@InterfaceAudience.Private
-public final class CoderUtil {
-
-  private CoderUtil() {
-    // No called
-  }
-
-
-  /**
-   * Get indexes into inputs array for items marked as null, either erased or
-   * not to read.
-   * @return indexes into inputs array
-   */
-  public static <T> int[] getErasedOrNotToReadIndexes(T[] inputs) {
-    int[] invalidIndexes = new int[inputs.length];
-    int idx = 0;
-    for (int i = 0; i < inputs.length; i++) {
-      if (inputs[i] == null) {
-        invalidIndexes[idx++] = i;
-      }
-    }
-
-    return Arrays.copyOf(invalidIndexes, idx);
-  }
-
-  /**
-   * Find the valid input from all the inputs.
-   * @param inputs input buffers to look for valid input
-   * @return the first valid input
-   */
-  public static <T> T findFirstValidInput(T[] inputs) {
-    for (T input : inputs) {
-      if (input != null) {
-        return input;
-      }
-    }
-
-    throw new HadoopIllegalArgumentException(
-        "Invalid inputs are found, all being null");
-  }
-
-  /**
-   * Picking up indexes of valid inputs.
-   * @param inputs actually decoding input buffers
-   * @param validIndexes an array to be filled and returned
-   * @param <T>
-   */
-  public static <T> void makeValidIndexes(T[] inputs, int[] validIndexes) {
-    int idx = 0;
-    for (int i = 0; i < inputs.length && idx < validIndexes.length; i++) {
-      if (inputs[i] != null) {
-        validIndexes[idx++] = i;
-      }
-    }
-  }
-}
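
This delete is a move rather than a removal: the new XORRawEncoder.doEncode
above already calls CoderUtil.resetOutputBuffers(...) unqualified, i.e. a
CoderUtil now lives in the rawcoder package itself. For reference, a small
sketch of what getErasedOrNotToReadIndexes computes (the driver array is
hypothetical; any T[] works since the method is generic):

  String[] inputs = {"d0", null, "d2", null};  // nulls mark erased units
  int[] erased = CoderUtil.getErasedOrNotToReadIndexes(inputs);
  // erased == {1, 3}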

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
index fdb47be..96a6408 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder.util;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-
 /**
  * Implementation of Galois field arithmetic with 2^p elements. The input must
  * be unsigned integers. It's ported from HDFS-RAID, slightly adapted.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
index 5075966..81dc458 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
@@ -46,37 +46,42 @@ public class TestCodecRawCoderMapping {
 
   @Test
   public void testRSDefaultRawCoder() {
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        numDataUnit, numParityUnit);
     // should return default raw coder of rs-default codec
-    RawErasureEncoder encoder = CodecUtil.createRSRawEncoder(
-        conf, numDataUnit, numParityUnit);
+    RawErasureEncoder encoder = CodecUtil.createRawEncoder(
+        conf, ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
     Assert.assertTrue(encoder instanceof RSRawEncoder);
-    RawErasureDecoder decoder = CodecUtil.createRSRawDecoder(
-        conf, numDataUnit, numParityUnit);
+    RawErasureDecoder decoder = CodecUtil.createRawDecoder(
+        conf, ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
     Assert.assertTrue(decoder instanceof RSRawDecoder);
 
     // should return default raw coder of rs-legacy codec
-    encoder = CodecUtil.createRSRawEncoder(conf, numDataUnit, numParityUnit,
-        ErasureCodeConstants.RS_LEGACY_CODEC_NAME);
+    encoder = CodecUtil.createRawEncoder(conf,
+        ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
     Assert.assertTrue(encoder instanceof RSRawEncoderLegacy);
-    decoder = CodecUtil.createRSRawDecoder(conf, numDataUnit, numParityUnit,
-        ErasureCodeConstants.RS_LEGACY_CODEC_NAME);
+    decoder = CodecUtil.createRawDecoder(conf,
+        ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
     Assert.assertTrue(decoder instanceof RSRawDecoderLegacy);
   }
 
   @Test
   public void testDedicatedRawCoderKey() {
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        numDataUnit, numParityUnit);
+
     String dummyFactName = "DummyNoneExistingFactory";
     // set the dummy factory to rs-legacy and create a raw coder
     // with rs-default, which is OK as the raw coder key is not used
     conf.set(CommonConfigurationKeys.
         IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY, dummyFactName);
-    RawErasureEncoder encoder = CodecUtil.createRSRawEncoder(conf, numDataUnit,
-        numParityUnit, ErasureCodeConstants.RS_DEFAULT_CODEC_NAME);
+    RawErasureEncoder encoder = CodecUtil.createRawEncoder(conf,
+        ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
     Assert.assertTrue(encoder instanceof RSRawEncoder);
     // now create the raw coder with rs-legacy, which should throw exception
     try {
-      CodecUtil.createRSRawEncoder(conf, numDataUnit, numParityUnit,
-          ErasureCodeConstants.RS_LEGACY_CODEC_NAME);
+      CodecUtil.createRawEncoder(conf,
+          ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
       Assert.fail();
     } catch (Exception e) {
       GenericTestUtils.assertExceptionContains("Failed to create raw coder", e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
index 633e064..6d14de8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
@@ -35,7 +35,7 @@ import static org.junit.Assert.assertTrue;
 public abstract class TestCoderBase {
   protected static Random RAND = new Random();
 
-  private boolean allowDump = true;
+  protected boolean allowDump = true;
 
   private Configuration conf;
   protected int numDataUnits;
@@ -90,13 +90,8 @@ public abstract class TestCoderBase {
     }
   }
 
-  /**
-   * Set true during setup if want to dump test settings and coding data,
-   * useful in debugging.
-   * @param allowDump
-   */
-  protected void setAllowDump(boolean allowDump) {
-    this.allowDump = allowDump;
+  protected boolean isAllowDump() {
+    return allowDump;
   }
 
   /**
@@ -502,7 +497,8 @@ public abstract class TestCoderBase {
       sb.append(" erasedParityIndexes=").
               append(Arrays.toString(erasedParityIndexes));
       sb.append(" usingDirectBuffer=").append(usingDirectBuffer);
-      sb.append(" isAllowingChangeInputs=").append(allowChangeInputs);
+      sb.append(" allowChangeInputs=").append(allowChangeInputs);
+      sb.append(" allowVerboseDump=").append(allowDump);
       sb.append("\n");
 
       System.out.println(sb.toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
index 63a2ac8..5be9b4e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
@@ -50,7 +50,7 @@ public class TestDummyRawCoder extends TestRawCoderBase {
   @Override
   protected void testCoding(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-    prepareCoders();
+    prepareCoders(true);
 
     prepareBufferAllocator(true);
     setAllowChangeInputs(false);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
index cf77539..32f0e00 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.io.erasurecode.ECChunk;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 import org.junit.Assert;
 import org.junit.Test;
@@ -62,7 +63,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    */
   protected void testCoding(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-    prepareCoders();
+    prepareCoders(true);
 
     /**
      * The following runs will use 3 different chunkSize for inputs and outputs,
@@ -79,7 +80,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    */
   protected void testCodingWithBadInput(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-    prepareCoders();
+    prepareCoders(true);
 
     try {
       performTestCoding(baseChunkSize, false, true, false, true);
@@ -95,7 +96,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    */
   protected void testCodingWithBadOutput(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-    prepareCoders();
+    prepareCoders(true);
 
     try {
       performTestCoding(baseChunkSize, false, false, true, true);
@@ -189,16 +190,23 @@ public abstract class TestRawCoderBase extends TestCoderBase {
 
   protected void setAllowChangeInputs(boolean allowChangeInputs) {
     this.allowChangeInputs = allowChangeInputs;
-    encoder.setCoderOption(CoderOption.ALLOW_CHANGE_INPUTS, allowChangeInputs);
-    decoder.setCoderOption(CoderOption.ALLOW_CHANGE_INPUTS, allowChangeInputs);
   }
 
-  protected void prepareCoders() {
-    if (encoder == null) {
+  /**
+   * Set true during setup if want to dump test settings and coding data,
+   * useful in debugging.
+   * @param allowDump
+   */
+  protected void setAllowDump(boolean allowDump) {
+    this.allowDump = allowDump;
+  }
+
+  protected void prepareCoders(boolean recreate) {
+    if (encoder == null || recreate) {
       encoder = createEncoder();
     }
 
-    if (decoder == null) {
+    if (decoder == null || recreate) {
       decoder = createDecoder();
     }
   }
@@ -222,18 +230,16 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    * @return
    */
   protected RawErasureEncoder createEncoder() {
-    RawErasureEncoder encoder;
+    ErasureCoderOptions coderConf =
+        new ErasureCoderOptions(numDataUnits, numParityUnits,
+            allowChangeInputs, allowDump);
     try {
       Constructor<? extends RawErasureEncoder> constructor =
-              (Constructor<? extends RawErasureEncoder>)
-                      encoderClass.getConstructor(int.class, int.class);
-      encoder = constructor.newInstance(numDataUnits, numParityUnits);
+          encoderClass.getConstructor(ErasureCoderOptions.class);
+      return constructor.newInstance(coderConf);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create encoder", e);
     }
-
-    encoder.setConf(getConf());
-    return encoder;
   }
 
   /**
@@ -241,18 +247,16 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    * @return
    */
   protected RawErasureDecoder createDecoder() {
-    RawErasureDecoder decoder;
+    ErasureCoderOptions coderConf =
+        new ErasureCoderOptions(numDataUnits, numParityUnits,
+            allowChangeInputs, allowDump);
     try {
       Constructor<? extends RawErasureDecoder> constructor =
-              (Constructor<? extends RawErasureDecoder>)
-                      decoderClass.getConstructor(int.class, int.class);
-      decoder = constructor.newInstance(numDataUnits, numParityUnits);
+          decoderClass.getConstructor(ErasureCoderOptions.class);
+      return constructor.newInstance(coderConf);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create decoder", e);
     }
-
-    decoder.setConf(getConf());
-    return decoder;
   }
 
   /**
@@ -261,7 +265,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
    */
   protected void testInputPosition(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-    prepareCoders();
+    prepareCoders(true);
     prepareBufferAllocator(false);
 
     // verify encode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
index 1944782..1bdbc32 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
@@ -38,6 +38,7 @@ import static org.apache.hadoop.hdfs.util.StripedBlockUtil.StripingChunkReadResu
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.util.DirectBufferPool;
 
@@ -184,8 +185,10 @@ public class DFSStripedInputStream extends DFSInputStream {
     curStripeRange = new StripeRange(0, 0);
     readingService =
         new ExecutorCompletionService<>(dfsClient.getStripedReadsThreadPool());
-    decoder = CodecUtil.createRSRawDecoder(dfsClient.getConfiguration(),
-        dataBlkNum, parityBlkNum, ecPolicy.getCodecName());
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        dataBlkNum, parityBlkNum);
+    decoder = CodecUtil.createRawDecoder(dfsClient.getConfiguration(),
+        ecPolicy.getCodecName(), coderOptions);
     if (DFSClient.LOG.isDebugEnabled()) {
       DFSClient.LOG.debug("Creating an striped input stream for file " + src);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
index 403e50f..85dc749 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hdfs.util.StripedBlockUtil;
 import org.apache.hadoop.io.MultipleIOException;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
@@ -286,8 +287,10 @@ public class DFSStripedOutputStream extends DFSOutputStream {
     flushAllExecutorCompletionService = new
         ExecutorCompletionService<>(flushAllExecutor);
 
-    encoder = CodecUtil.createRSRawEncoder(dfsClient.getConfiguration(),
-        numDataBlocks, numParityBlocks, ecPolicy.getCodecName());
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        numDataBlocks, numParityBlocks);
+    encoder = CodecUtil.createRawEncoder(dfsClient.getConfiguration(),
+        ecPolicy.getCodecName(), coderOptions);
 
     coordinator = new Coordinator(numAllBlocks);
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
index c80bf96..47a6979 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand.BlockECReconstructionInfo;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.DataChecksum;
@@ -215,8 +216,10 @@ class StripedReconstructor implements Runnable {
   // Initialize decoder
   private void initDecoderIfNecessary() {
     if (decoder == null) {
-      decoder = CodecUtil.createRSRawDecoder(conf, ecPolicy.getNumDataUnits(),
-          ecPolicy.getNumParityUnits(), ecPolicy.getCodecName());
+      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+          ecPolicy.getNumDataUnits(), ecPolicy.getNumParityUnits());
+      decoder = CodecUtil.createRawDecoder(conf, ecPolicy.getCodecName(),
+          coderOptions);
     }
   }
 

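DFSStripedInputStream, DFSStripedOutputStream, and StripedReconstructor above
(and the test utilities below) all follow the same mechanical migration;
schematically, with numData, numParity, and codecName as placeholders:

  // Before: codec-specific factory method, unit counts passed inline.
  //   decoder = CodecUtil.createRSRawDecoder(conf, numData, numParity, codecName);
  // After: one options object, one codec-neutral entry point.
  ErasureCoderOptions coderOptions = new ErasureCoderOptions(numData, numParity);
  RawErasureDecoder decoder =
      CodecUtil.createRawDecoder(conf, codecName, coderOptions);
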
http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
index 6dcccc3..4c3afdd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.util.StripedBlockUtil;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem.WebHdfsInputStream;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 import org.junit.Assert;
 
@@ -491,9 +492,12 @@ public class StripedFileTestUtil {
         System.arraycopy(tmp, 0, dataBytes[i], 0, tmp.length);
       }
     }
+
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        dataBytes.length, parityBytes.length);
     final RawErasureEncoder encoder =
-        CodecUtil.createRSRawEncoder(conf, dataBytes.length, parityBytes.length,
-            TEST_EC_POLICY.getCodecName());
+        CodecUtil.createRawEncoder(conf, TEST_EC_POLICY.getCodecName(),
+            coderOptions);
     encoder.encode(dataBytes, expectedParityBytes);
     for (int i = 0; i < parityBytes.length; i++) {
       if (checkSet.contains(i + dataBytes.length)){

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77202fa1/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
index e4f7ac0..a02a8d6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStream.java
@@ -20,30 +20,25 @@ package org.apache.hadoop.hdfs;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
-import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Test;
 import org.junit.Rule;
+import org.junit.Test;
 import org.junit.rules.Timeout;
 
 import java.io.IOException;
@@ -51,6 +46,12 @@ import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class TestDFSStripedInputStream {
 
   public static final Log LOG = LogFactory.getLog(TestDFSStripedInputStream.class);
@@ -217,8 +218,10 @@ public class TestDFSStripedInputStream {
       }
     }
 
-    RawErasureDecoder rawDecoder = CodecUtil.createRSRawDecoder(conf,
-        DATA_BLK_NUM, PARITY_BLK_NUM, ecPolicy.getCodecName());
+    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
+        DATA_BLK_NUM, PARITY_BLK_NUM);
+    RawErasureDecoder rawDecoder = CodecUtil.createRawDecoder(conf,
+        ecPolicy.getCodecName(), coderOptions);
 
     // Update the expected content for decoded data
     int[] missingBlkIdx = new int[PARITY_BLK_NUM];


