hadoop-common-commits mailing list archives

From su...@apache.org
Subject [04/50] [abbrv] hadoop git commit: HADOOP-14261. Some refactoring work for erasure coding raw coder. Contributed by Lin Zeng.
Date Tue, 25 Apr 2017 02:01:24 GMT
HADOOP-14261. Some refactoring work for erasure coding raw coder. Contributed by Lin Zeng.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a22fe02f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a22fe02f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a22fe02f

Branch: refs/heads/YARN-2915
Commit: a22fe02fba66280a8e994282e9ead23d9e20669a
Parents: b080338
Author: Andrew Wang <wang@apache.org>
Authored: Fri Apr 21 11:35:30 2017 -0700
Committer: Andrew Wang <wang@apache.org>
Committed: Fri Apr 21 11:35:36 2017 -0700

----------------------------------------------------------------------
 .../apache/hadoop/io/erasurecode/CodecUtil.java |   4 +-
 .../rawcoder/RSLegacyRawDecoder.java            | 270 +++++++++++++++++++
 .../rawcoder/RSLegacyRawEncoder.java            | 129 +++++++++
 .../RSLegacyRawErasureCoderFactory.java         |  38 +++
 .../rawcoder/RSRawDecoderLegacy.java            | 270 -------------------
 .../rawcoder/RSRawEncoderLegacy.java            | 129 ---------
 .../RSRawErasureCoderFactoryLegacy.java         |  38 ---
 .../src/main/resources/core-default.xml         |   2 +-
 .../erasurecode/TestCodecRawCoderMapping.java   |  12 +-
 .../rawcoder/RawErasureCoderBenchmark.java      |   2 +-
 .../erasurecode/rawcoder/TestDummyRawCoder.java |   4 +-
 .../rawcoder/TestNativeRSRawCoder.java          |   4 +-
 .../rawcoder/TestNativeXORRawCoder.java         |   4 +-
 .../rawcoder/TestRSLegacyRawCoder.java          |  33 +++
 .../io/erasurecode/rawcoder/TestRSRawCoder.java |   4 +-
 .../rawcoder/TestRSRawCoderInteroperable1.java  |   4 +-
 .../rawcoder/TestRSRawCoderInteroperable2.java  |   4 +-
 .../rawcoder/TestRSRawCoderLegacy.java          |  33 ---
 .../erasurecode/rawcoder/TestRawCoderBase.java  |  16 +-
 .../erasurecode/rawcoder/TestXORRawCoder.java   |   4 +-
 .../rawcoder/TestXORRawCoderInteroperable1.java |   4 +-
 .../rawcoder/TestXORRawCoderInteroperable2.java |   4 +-
 22 files changed, 504 insertions(+), 508 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
index 0c66df6..c8b6a68 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -80,7 +80,7 @@ public final class CodecUtil {
   public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODERS_KEY =
       IO_ERASURECODE_CODEC + "rs-legacy.rawcoders";
   public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODERS_DEFAULT =
-      RSRawErasureCoderFactoryLegacy.class.getCanonicalName();
+      RSLegacyRawErasureCoderFactory.class.getCanonicalName();
   public static final String IO_ERASURECODE_CODEC_RS_RAWCODERS_KEY =
       IO_ERASURECODE_CODEC + "rs.rawcoders";
   public static final String IO_ERASURECODE_CODEC_RS_RAWCODERS_DEFAULT =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java
new file mode 100644
index 0000000..cfd7d29
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java
@@ -0,0 +1,270 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A raw erasure decoder for the RS code scheme, in pure Java, for use when a
+ * native implementation isn't available in the environment. Please prefer the
+ * native implementations whenever possible.
+ *
+ * Currently this implementation computes and decodes not-to-read units
+ * unnecessarily, due to a limitation in the underlying GF implementation.
+ * This will be addressed in HADOOP-11871.
+ */
+@InterfaceAudience.Private
+public class RSLegacyRawDecoder extends RawErasureDecoder {
+  // To describe and calculate the needed Vandermonde matrix
+  private int[] errSignature;
+  private int[] primitivePower;
+
+  public RSLegacyRawDecoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+    if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
+      throw new HadoopIllegalArgumentException(
+              "Invalid numDataUnits and numParityUnits");
+    }
+
+    this.errSignature = new int[getNumParityUnits()];
+    this.primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
+        getNumParityUnits());
+  }
+
+  @Override
+  public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
+                     ByteBuffer[] outputs) {
+    // Make copies to avoid modifying the original arrays.
+    ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
+    int[] newErasedIndexes = new int[erasedIndexes.length];
+    ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
+
+    // Adjust the order to match the underlying implementation's requirements.
+    adjustOrder(inputs, newInputs,
+        erasedIndexes, newErasedIndexes, outputs, newOutputs);
+
+    super.decode(newInputs, newErasedIndexes, newOutputs);
+  }
+
+  @Override
+  public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) {
+    // Make copies to avoid modifying the original arrays.
+    byte[][] newInputs = new byte[inputs.length][];
+    int[] newErasedIndexes = new int[erasedIndexes.length];
+    byte[][] newOutputs = new byte[outputs.length][];
+
+    // Adjust the order to match the underlying implementation's requirements.
+    adjustOrder(inputs, newInputs,
+        erasedIndexes, newErasedIndexes, outputs, newOutputs);
+
+    super.decode(newInputs, newErasedIndexes, newOutputs);
+  }
+
+  private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
+                          ByteBuffer[] outputs) {
+    ByteBuffer valid = CoderUtil.findFirstValidInput(inputs);
+    int dataLen = valid.remaining();
+    for (int i = 0; i < erasedIndexes.length; i++) {
+      errSignature[i] = primitivePower[erasedIndexes[i]];
+      RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
+    }
+
+    RSUtil.GF.solveVandermondeSystem(errSignature,
+        outputs, erasedIndexes.length);
+  }
+
+  private void doDecodeImpl(byte[][] inputs, int[] inputOffsets,
+                          int dataLen, int[] erasedIndexes,
+                          byte[][] outputs, int[] outputOffsets) {
+    for (int i = 0; i < erasedIndexes.length; i++) {
+      errSignature[i] = primitivePower[erasedIndexes[i]];
+      RSUtil.GF.substitute(inputs, inputOffsets, dataLen, outputs[i],
+          outputOffsets[i], primitivePower[i]);
+    }
+
+    RSUtil.GF.solveVandermondeSystem(errSignature, outputs, outputOffsets,
+        erasedIndexes.length, dataLen);
+  }
+
+  @Override
+  protected void doDecode(ByteArrayDecodingState decodingState) {
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs,
+        decodingState.outputOffsets, dataLen);
+
+    /*
+     * The passed parameters are friendly to callers but not to the underlying
+     * implementation, so we have to adjust them before calling doDecodeImpl.
+     */
+
+    byte[][] bytesArrayBuffers = new byte[getNumParityUnits()][];
+    byte[][] adjustedByteArrayOutputsParameter =
+        new byte[getNumParityUnits()][];
+    int[] adjustedOutputOffsets = new int[getNumParityUnits()];
+
+    int[] erasedOrNotToReadIndexes =
+        CoderUtil.getNullIndexes(decodingState.inputs);
+
+    // Use the caller passed buffers in erasedIndexes positions
+    for (int outputIdx = 0, i = 0;
+         i < decodingState.erasedIndexes.length; i++) {
+      boolean found = false;
+      for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
+        // If this index is one requested by the caller via erasedIndexes, then
+        // we use the passed output buffer to avoid copying data thereafter.
+        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
+          found = true;
+          adjustedByteArrayOutputsParameter[j] = CoderUtil.resetBuffer(
+              decodingState.outputs[outputIdx],
+              decodingState.outputOffsets[outputIdx], dataLen);
+          adjustedOutputOffsets[j] = decodingState.outputOffsets[outputIdx];
+          outputIdx++;
+        }
+      }
+      if (!found) {
+        throw new HadoopIllegalArgumentException(
+            "Inputs not fully corresponding to erasedIndexes in null places");
+      }
+    }
+    // Use shared buffers for other positions (not set yet)
+    for (int bufferIdx = 0, i = 0; i < erasedOrNotToReadIndexes.length; i++) {
+      if (adjustedByteArrayOutputsParameter[i] == null) {
+        adjustedByteArrayOutputsParameter[i] = CoderUtil.resetBuffer(
+            checkGetBytesArrayBuffer(bytesArrayBuffers, bufferIdx, dataLen),
+            0, dataLen);
+        adjustedOutputOffsets[i] = 0; // Always 0 for such temp output
+        bufferIdx++;
+      }
+    }
+
+    doDecodeImpl(decodingState.inputs, decodingState.inputOffsets,
+        dataLen, erasedOrNotToReadIndexes,
+        adjustedByteArrayOutputsParameter, adjustedOutputOffsets);
+  }
+
+  @Override
+  protected void doDecode(ByteBufferDecodingState decodingState) {
+    int dataLen = decodingState.decodeLength;
+    CoderUtil.resetOutputBuffers(decodingState.outputs, dataLen);
+
+    /*
+     * The passed parameters are friendly to callers but not to the underlying
+     * implementation, so we have to adjust them before calling doDecodeImpl.
+     */
+
+    int[] erasedOrNotToReadIndexes =
+        CoderUtil.getNullIndexes(decodingState.inputs);
+
+    ByteBuffer[] directBuffers = new ByteBuffer[getNumParityUnits()];
+    ByteBuffer[] adjustedDirectBufferOutputsParameter =
+        new ByteBuffer[getNumParityUnits()];
+
+    // Use the caller passed buffers in erasedIndexes positions
+    for (int outputIdx = 0, i = 0;
+         i < decodingState.erasedIndexes.length; i++) {
+      boolean found = false;
+      for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
+        // If this index is one requested by the caller via erasedIndexes, then
+        // we use the passed output buffer to avoid copying data thereafter.
+        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
+          found = true;
+          adjustedDirectBufferOutputsParameter[j] = CoderUtil.resetBuffer(
+              decodingState.outputs[outputIdx++], dataLen);
+        }
+      }
+      if (!found) {
+        throw new HadoopIllegalArgumentException(
+            "Inputs not fully corresponding to erasedIndexes in null places");
+      }
+    }
+    // Use shared buffers for other positions (not set yet)
+    for (int bufferIdx = 0, i = 0; i < erasedOrNotToReadIndexes.length; i++) {
+      if (adjustedDirectBufferOutputsParameter[i] == null) {
+        ByteBuffer buffer = checkGetDirectBuffer(
+            directBuffers, bufferIdx, dataLen);
+        buffer.position(0);
+        buffer.limit(dataLen);
+        adjustedDirectBufferOutputsParameter[i] =
+            CoderUtil.resetBuffer(buffer, dataLen);
+        bufferIdx++;
+      }
+    }
+
+    doDecodeImpl(decodingState.inputs, erasedOrNotToReadIndexes,
+        adjustedDirectBufferOutputsParameter);
+  }
+
+  /*
+   * Convert data units first order to parity units first order.
+   */
+  private <T> void adjustOrder(T[] inputs, T[] inputs2,
+                               int[] erasedIndexes, int[] erasedIndexes2,
+                               T[] outputs, T[] outputs2) {
+    // Example:
+    // d0 d1 d2 d3 d4 d5 : p0 p1 p2 => p0 p1 p2 : d0 d1 d2 d3 d4 d5
+    System.arraycopy(inputs, getNumDataUnits(), inputs2,
+        0, getNumParityUnits());
+    System.arraycopy(inputs, 0, inputs2,
+        getNumParityUnits(), getNumDataUnits());
+
+    int numErasedDataUnits = 0, numErasedParityUnits = 0;
+    int idx = 0;
+    for (int i = 0; i < erasedIndexes.length; i++) {
+      if (erasedIndexes[i] >= getNumDataUnits()) {
+        erasedIndexes2[idx++] = erasedIndexes[i] - getNumDataUnits();
+        numErasedParityUnits++;
+      }
+    }
+    for (int i = 0; i < erasedIndexes.length; i++) {
+      if (erasedIndexes[i] < getNumDataUnits()) {
+        erasedIndexes2[idx++] = erasedIndexes[i] + getNumParityUnits();
+        numErasedDataUnits++;
+      }
+    }
+
+    // Copy for data units
+    System.arraycopy(outputs, numErasedDataUnits, outputs2,
+        0, numErasedParityUnits);
+    // Copy for parity units
+    System.arraycopy(outputs, 0, outputs2,
+        numErasedParityUnits, numErasedDataUnits);
+  }
+
+  private static byte[] checkGetBytesArrayBuffer(byte[][] bytesArrayBuffers,
+      int idx, int bufferLen) {
+    if (bytesArrayBuffers[idx] == null ||
+        bytesArrayBuffers[idx].length < bufferLen) {
+      bytesArrayBuffers[idx] = new byte[bufferLen];
+    }
+    return bytesArrayBuffers[idx];
+  }
+
+  private static ByteBuffer checkGetDirectBuffer(ByteBuffer[] directBuffers,
+      int idx, int bufferLen) {
+    if (directBuffers[idx] == null ||
+        directBuffers[idx].capacity() < bufferLen) {
+      directBuffers[idx] = ByteBuffer.allocateDirect(bufferLen);
+    }
+    return directBuffers[idx];
+  }
+}
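
The new decoder keeps the existing raw decoder contract: the inputs array carries a null entry at each erased (or deliberately unread) position, erasedIndexes lists the units the caller wants reconstructed, and outputs supplies one buffer per erased index. Below is a minimal usage sketch against the byte-array API; the 6+3 layout, cell size and sketch class name are illustrative assumptions, not part of this patch.

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;

public class RSLegacyDecodeSketch {
  public static void main(String[] args) {
    // Hypothetical 6 data + 3 parity layout; cell size is illustrative.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RawErasureDecoder decoder = new RSLegacyRawDecoder(options);

    int cellSize = 1024;
    byte[][] inputs = new byte[9][];      // units 0..5 are data, 6..8 are parity
    for (int i = 1; i < 9; i++) {
      inputs[i] = new byte[cellSize];     // surviving units, filled by the caller
    }
    // inputs[0] stays null: data unit 0 is erased and must be reconstructed.
    int[] erasedIndexes = {0};
    byte[][] outputs = {new byte[cellSize]}; // one output buffer per erased index

    decoder.decode(inputs, erasedIndexes, outputs);
    // outputs[0] now holds the reconstructed content of data unit 0.
  }
}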

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawEncoder.java
new file mode 100644
index 0000000..7a526c9
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawEncoder.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+/**
+ * A raw erasure encoder for the RS code scheme, in pure Java, for use when a
+ * native implementation isn't available in the environment. Please prefer the
+ * native implementations whenever possible.
+ */
+@InterfaceAudience.Private
+public class RSLegacyRawEncoder extends RawErasureEncoder {
+  private int[] generatingPolynomial;
+
+  public RSLegacyRawEncoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+
+    assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
+
+    int[] primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
+        getNumParityUnits());
+    // compute generating polynomial
+    int[] gen = {1};
+    int[] poly = new int[2];
+    for (int i = 0; i < getNumParityUnits(); i++) {
+      poly[0] = primitivePower[i];
+      poly[1] = 1;
+      gen = RSUtil.GF.multiply(gen, poly);
+    }
+    // generating polynomial has all generating roots
+    generatingPolynomial = gen;
+  }
+
+  @Override
+  protected void doEncode(ByteBufferEncodingState encodingState) {
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.encodeLength);
+    // parity units + data units
+    ByteBuffer[] all = new ByteBuffer[encodingState.outputs.length +
+        encodingState.inputs.length];
+
+    if (allowChangeInputs()) {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.inputs, 0, all,
+          encodingState.outputs.length, encodingState.inputs.length);
+    } else {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+
+      /*
+       * Note: if this coder were ever (rarely) used in a production system,
+       * this could be optimized to cache and reuse the newly allocated
+       * buffers instead of reallocating them each time.
+       */
+      ByteBuffer tmp;
+      for (int i = 0; i < encodingState.inputs.length; i++) {
+        tmp = ByteBuffer.allocate(encodingState.inputs[i].remaining());
+        tmp.put(encodingState.inputs[i]);
+        tmp.flip();
+        all[encodingState.outputs.length + i] = tmp;
+      }
+    }
+
+    // Compute the remainder
+    RSUtil.GF.remainder(all, generatingPolynomial);
+  }
+
+  @Override
+  protected void doEncode(ByteArrayEncodingState encodingState) {
+    int dataLen = encodingState.encodeLength;
+    CoderUtil.resetOutputBuffers(encodingState.outputs,
+        encodingState.outputOffsets, dataLen);
+    // parity units + data units
+    byte[][] all = new byte[encodingState.outputs.length +
+        encodingState.inputs.length][];
+    int[] allOffsets = new int[encodingState.outputOffsets.length +
+        encodingState.inputOffsets.length];
+
+    if (allowChangeInputs()) {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.inputs, 0, all,
+          encodingState.outputs.length, encodingState.inputs.length);
+
+      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
+          encodingState.outputOffsets.length);
+      System.arraycopy(encodingState.inputOffsets, 0, allOffsets,
+          encodingState.outputOffsets.length,
+          encodingState.inputOffsets.length);
+    } else {
+      System.arraycopy(encodingState.outputs, 0, all, 0,
+          encodingState.outputs.length);
+      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
+          encodingState.outputOffsets.length);
+
+      for (int i = 0; i < encodingState.inputs.length; i++) {
+        all[encodingState.outputs.length + i] =
+            Arrays.copyOfRange(encodingState.inputs[i],
+            encodingState.inputOffsets[i],
+                encodingState.inputOffsets[i] + dataLen);
+      }
+    }
+
+    // Compute the remainder
+    RSUtil.GF.remainder(all, allOffsets, dataLen, generatingPolynomial);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawErasureCoderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawErasureCoderFactory.java
new file mode 100644
index 0000000..45bbf40
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawErasureCoderFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+/**
+ * A raw coder factory for the legacy raw Reed-Solomon coder in Java.
+ */
+@InterfaceAudience.Private
+public class RSLegacyRawErasureCoderFactory implements RawErasureCoderFactory {
+
+  @Override
+  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
+    return new RSLegacyRawEncoder(coderOptions);
+  }
+
+  @Override
+  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
+    return new RSLegacyRawDecoder(coderOptions);
+  }
+}
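
The factory is what CodecUtil instantiates by reflection from configuration; used directly it is only a thin wrapper around the two constructors. A hedged sketch of driving it by hand follows (the 6+3 layout and buffer sizes are again assumptions for illustration):

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawErasureCoderFactory;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public class RSLegacyFactorySketch {
  public static void main(String[] args) {
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RSLegacyRawErasureCoderFactory factory = new RSLegacyRawErasureCoderFactory();

    RawErasureEncoder encoder = factory.createEncoder(options);
    RawErasureDecoder decoder = factory.createDecoder(options);

    int cellSize = 1024;
    byte[][] dataUnits = new byte[6][cellSize];   // application data, filled by the caller
    byte[][] parityUnits = new byte[3][cellSize]; // the encoder writes parity here

    encoder.encode(dataUnits, parityUnits);
    // decoder would then be used as in the RSLegacyRawDecoder sketch above.
  }
}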

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
deleted file mode 100644
index c8deec9..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoderLegacy.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
-
-import java.nio.ByteBuffer;
-
-/**
- * A raw erasure decoder in RS code scheme in pure Java in case native one
- * isn't available in some environment. Please always use native implementations
- * when possible.
- *
- * Currently this implementation will compute and decode not to read units
- * unnecessarily due to the underlying implementation limit in GF. This will be
- * addressed in HADOOP-11871.
- */
-@InterfaceAudience.Private
-public class RSRawDecoderLegacy extends RawErasureDecoder {
-  // To describe and calculate the needed Vandermonde matrix
-  private int[] errSignature;
-  private int[] primitivePower;
-
-  public RSRawDecoderLegacy(ErasureCoderOptions coderOptions) {
-    super(coderOptions);
-    if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
-      throw new HadoopIllegalArgumentException(
-              "Invalid numDataUnits and numParityUnits");
-    }
-
-    this.errSignature = new int[getNumParityUnits()];
-    this.primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
-        getNumParityUnits());
-  }
-
-  @Override
-  public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
-                     ByteBuffer[] outputs) {
-    // Make copies avoiding affecting original ones;
-    ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
-    int[] newErasedIndexes = new int[erasedIndexes.length];
-    ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
-
-    // Adjust the order to match with underlying requirements.
-    adjustOrder(inputs, newInputs,
-        erasedIndexes, newErasedIndexes, outputs, newOutputs);
-
-    super.decode(newInputs, newErasedIndexes, newOutputs);
-  }
-
-  @Override
-  public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) {
-    // Make copies avoiding affecting original ones;
-    byte[][] newInputs = new byte[inputs.length][];
-    int[] newErasedIndexes = new int[erasedIndexes.length];
-    byte[][] newOutputs = new byte[outputs.length][];
-
-    // Adjust the order to match with underlying requirements.
-    adjustOrder(inputs, newInputs,
-        erasedIndexes, newErasedIndexes, outputs, newOutputs);
-
-    super.decode(newInputs, newErasedIndexes, newOutputs);
-  }
-
-  private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
-                          ByteBuffer[] outputs) {
-    ByteBuffer valid = CoderUtil.findFirstValidInput(inputs);
-    int dataLen = valid.remaining();
-    for (int i = 0; i < erasedIndexes.length; i++) {
-      errSignature[i] = primitivePower[erasedIndexes[i]];
-      RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
-    }
-
-    RSUtil.GF.solveVandermondeSystem(errSignature,
-        outputs, erasedIndexes.length);
-  }
-
-  private void doDecodeImpl(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, int[] erasedIndexes,
-                          byte[][] outputs, int[] outputOffsets) {
-    for (int i = 0; i < erasedIndexes.length; i++) {
-      errSignature[i] = primitivePower[erasedIndexes[i]];
-      RSUtil.GF.substitute(inputs, inputOffsets, dataLen, outputs[i],
-          outputOffsets[i], primitivePower[i]);
-    }
-
-    RSUtil.GF.solveVandermondeSystem(errSignature, outputs, outputOffsets,
-        erasedIndexes.length, dataLen);
-  }
-
-  @Override
-  protected void doDecode(ByteArrayDecodingState decodingState) {
-    int dataLen = decodingState.decodeLength;
-    CoderUtil.resetOutputBuffers(decodingState.outputs,
-        decodingState.outputOffsets, dataLen);
-
-    /**
-     * As passed parameters are friendly to callers but not to the underlying
-     * implementations, so we have to adjust them before calling doDecodeImpl.
-     */
-
-    byte[][] bytesArrayBuffers = new byte[getNumParityUnits()][];
-    byte[][] adjustedByteArrayOutputsParameter =
-        new byte[getNumParityUnits()][];
-    int[] adjustedOutputOffsets = new int[getNumParityUnits()];
-
-    int[] erasedOrNotToReadIndexes =
-        CoderUtil.getNullIndexes(decodingState.inputs);
-
-    // Use the caller passed buffers in erasedIndexes positions
-    for (int outputIdx = 0, i = 0;
-         i < decodingState.erasedIndexes.length; i++) {
-      boolean found = false;
-      for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
-        // If this index is one requested by the caller via erasedIndexes, then
-        // we use the passed output buffer to avoid copying data thereafter.
-        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
-          found = true;
-          adjustedByteArrayOutputsParameter[j] = CoderUtil.resetBuffer(
-              decodingState.outputs[outputIdx],
-              decodingState.outputOffsets[outputIdx], dataLen);
-          adjustedOutputOffsets[j] = decodingState.outputOffsets[outputIdx];
-          outputIdx++;
-        }
-      }
-      if (!found) {
-        throw new HadoopIllegalArgumentException(
-            "Inputs not fully corresponding to erasedIndexes in null places");
-      }
-    }
-    // Use shared buffers for other positions (not set yet)
-    for (int bufferIdx = 0, i = 0; i < erasedOrNotToReadIndexes.length; i++) {
-      if (adjustedByteArrayOutputsParameter[i] == null) {
-        adjustedByteArrayOutputsParameter[i] = CoderUtil.resetBuffer(
-            checkGetBytesArrayBuffer(bytesArrayBuffers, bufferIdx, dataLen),
-            0, dataLen);
-        adjustedOutputOffsets[i] = 0; // Always 0 for such temp output
-        bufferIdx++;
-      }
-    }
-
-    doDecodeImpl(decodingState.inputs, decodingState.inputOffsets,
-        dataLen, erasedOrNotToReadIndexes,
-        adjustedByteArrayOutputsParameter, adjustedOutputOffsets);
-  }
-
-  @Override
-  protected void doDecode(ByteBufferDecodingState decodingState) {
-    int dataLen = decodingState.decodeLength;
-    CoderUtil.resetOutputBuffers(decodingState.outputs, dataLen);
-
-    /**
-     * As passed parameters are friendly to callers but not to the underlying
-     * implementations, so we have to adjust them before calling doDecodeImpl.
-     */
-
-    int[] erasedOrNotToReadIndexes =
-        CoderUtil.getNullIndexes(decodingState.inputs);
-
-    ByteBuffer[] directBuffers = new ByteBuffer[getNumParityUnits()];
-    ByteBuffer[] adjustedDirectBufferOutputsParameter =
-        new ByteBuffer[getNumParityUnits()];
-
-    // Use the caller passed buffers in erasedIndexes positions
-    for (int outputIdx = 0, i = 0;
-         i < decodingState.erasedIndexes.length; i++) {
-      boolean found = false;
-      for (int j = 0; j < erasedOrNotToReadIndexes.length; j++) {
-        // If this index is one requested by the caller via erasedIndexes, then
-        // we use the passed output buffer to avoid copying data thereafter.
-        if (decodingState.erasedIndexes[i] == erasedOrNotToReadIndexes[j]) {
-          found = true;
-          adjustedDirectBufferOutputsParameter[j] = CoderUtil.resetBuffer(
-              decodingState.outputs[outputIdx++], dataLen);
-        }
-      }
-      if (!found) {
-        throw new HadoopIllegalArgumentException(
-            "Inputs not fully corresponding to erasedIndexes in null places");
-      }
-    }
-    // Use shared buffers for other positions (not set yet)
-    for (int bufferIdx = 0, i = 0; i < erasedOrNotToReadIndexes.length; i++) {
-      if (adjustedDirectBufferOutputsParameter[i] == null) {
-        ByteBuffer buffer = checkGetDirectBuffer(
-            directBuffers, bufferIdx, dataLen);
-        buffer.position(0);
-        buffer.limit(dataLen);
-        adjustedDirectBufferOutputsParameter[i] =
-            CoderUtil.resetBuffer(buffer, dataLen);
-        bufferIdx++;
-      }
-    }
-
-    doDecodeImpl(decodingState.inputs, erasedOrNotToReadIndexes,
-        adjustedDirectBufferOutputsParameter);
-  }
-
-  /*
-   * Convert data units first order to parity units first order.
-   */
-  private <T> void adjustOrder(T[] inputs, T[] inputs2,
-                               int[] erasedIndexes, int[] erasedIndexes2,
-                               T[] outputs, T[] outputs2) {
-    // Example:
-    // d0 d1 d2 d3 d4 d5 : p0 p1 p2 => p0 p1 p2 : d0 d1 d2 d3 d4 d5
-    System.arraycopy(inputs, getNumDataUnits(), inputs2,
-        0, getNumParityUnits());
-    System.arraycopy(inputs, 0, inputs2,
-        getNumParityUnits(), getNumDataUnits());
-
-    int numErasedDataUnits = 0, numErasedParityUnits = 0;
-    int idx = 0;
-    for (int i = 0; i < erasedIndexes.length; i++) {
-      if (erasedIndexes[i] >= getNumDataUnits()) {
-        erasedIndexes2[idx++] = erasedIndexes[i] - getNumDataUnits();
-        numErasedParityUnits++;
-      }
-    }
-    for (int i = 0; i < erasedIndexes.length; i++) {
-      if (erasedIndexes[i] < getNumDataUnits()) {
-        erasedIndexes2[idx++] = erasedIndexes[i] + getNumParityUnits();
-        numErasedDataUnits++;
-      }
-    }
-
-    // Copy for data units
-    System.arraycopy(outputs, numErasedDataUnits, outputs2,
-        0, numErasedParityUnits);
-    // Copy for parity units
-    System.arraycopy(outputs, 0, outputs2,
-        numErasedParityUnits, numErasedDataUnits);
-  }
-
-  private static byte[] checkGetBytesArrayBuffer(byte[][] bytesArrayBuffers,
-      int idx, int bufferLen) {
-    if (bytesArrayBuffers[idx] == null ||
-        bytesArrayBuffers[idx].length < bufferLen) {
-      bytesArrayBuffers[idx] = new byte[bufferLen];
-    }
-    return bytesArrayBuffers[idx];
-  }
-
-  private static ByteBuffer checkGetDirectBuffer(ByteBuffer[] directBuffers,
-      int idx, int bufferLen) {
-    if (directBuffers[idx] == null ||
-        directBuffers[idx].capacity() < bufferLen) {
-      directBuffers[idx] = ByteBuffer.allocateDirect(bufferLen);
-    }
-    return directBuffers[idx];
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
deleted file mode 100644
index ed1c83b..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoderLegacy.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
-
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-/**
- * A raw erasure encoder in RS code scheme in pure Java in case native one
- * isn't available in some environment. Please always use native implementations
- * when possible.
- */
-@InterfaceAudience.Private
-public class RSRawEncoderLegacy extends RawErasureEncoder {
-  private int[] generatingPolynomial;
-
-  public RSRawEncoderLegacy(ErasureCoderOptions coderOptions) {
-    super(coderOptions);
-
-    assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
-
-    int[] primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
-        getNumParityUnits());
-    // compute generating polynomial
-    int[] gen = {1};
-    int[] poly = new int[2];
-    for (int i = 0; i < getNumParityUnits(); i++) {
-      poly[0] = primitivePower[i];
-      poly[1] = 1;
-      gen = RSUtil.GF.multiply(gen, poly);
-    }
-    // generating polynomial has all generating roots
-    generatingPolynomial = gen;
-  }
-
-  @Override
-  protected void doEncode(ByteBufferEncodingState encodingState) {
-    CoderUtil.resetOutputBuffers(encodingState.outputs,
-        encodingState.encodeLength);
-    // parity units + data units
-    ByteBuffer[] all = new ByteBuffer[encodingState.outputs.length +
-        encodingState.inputs.length];
-
-    if (allowChangeInputs()) {
-      System.arraycopy(encodingState.outputs, 0, all, 0,
-          encodingState.outputs.length);
-      System.arraycopy(encodingState.inputs, 0, all,
-          encodingState.outputs.length, encodingState.inputs.length);
-    } else {
-      System.arraycopy(encodingState.outputs, 0, all, 0,
-          encodingState.outputs.length);
-
-      /**
-       * Note when this coder would be really (rarely) used in a production
-       * system, this can  be optimized to cache and reuse the new allocated
-       * buffers avoiding reallocating.
-       */
-      ByteBuffer tmp;
-      for (int i = 0; i < encodingState.inputs.length; i++) {
-        tmp = ByteBuffer.allocate(encodingState.inputs[i].remaining());
-        tmp.put(encodingState.inputs[i]);
-        tmp.flip();
-        all[encodingState.outputs.length + i] = tmp;
-      }
-    }
-
-    // Compute the remainder
-    RSUtil.GF.remainder(all, generatingPolynomial);
-  }
-
-  @Override
-  protected void doEncode(ByteArrayEncodingState encodingState) {
-    int dataLen = encodingState.encodeLength;
-    CoderUtil.resetOutputBuffers(encodingState.outputs,
-        encodingState.outputOffsets, dataLen);
-    // parity units + data units
-    byte[][] all = new byte[encodingState.outputs.length +
-        encodingState.inputs.length][];
-    int[] allOffsets = new int[encodingState.outputOffsets.length +
-        encodingState.inputOffsets.length];
-
-    if (allowChangeInputs()) {
-      System.arraycopy(encodingState.outputs, 0, all, 0,
-          encodingState.outputs.length);
-      System.arraycopy(encodingState.inputs, 0, all,
-          encodingState.outputs.length, encodingState.inputs.length);
-
-      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
-          encodingState.outputOffsets.length);
-      System.arraycopy(encodingState.inputOffsets, 0, allOffsets,
-          encodingState.outputOffsets.length,
-          encodingState.inputOffsets.length);
-    } else {
-      System.arraycopy(encodingState.outputs, 0, all, 0,
-          encodingState.outputs.length);
-      System.arraycopy(encodingState.outputOffsets, 0, allOffsets, 0,
-          encodingState.outputOffsets.length);
-
-      for (int i = 0; i < encodingState.inputs.length; i++) {
-        all[encodingState.outputs.length + i] =
-            Arrays.copyOfRange(encodingState.inputs[i],
-            encodingState.inputOffsets[i],
-                encodingState.inputOffsets[i] + dataLen);
-      }
-    }
-
-    // Compute the remainder
-    RSUtil.GF.remainder(all, allOffsets, dataLen, generatingPolynomial);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
deleted file mode 100644
index f0ebb3b..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactoryLegacy.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
-
-/**
- * A raw coder factory for the legacy raw Reed-Solomon coder in Java.
- */
-@InterfaceAudience.Private
-public class RSRawErasureCoderFactoryLegacy implements RawErasureCoderFactory {
-
-  @Override
-  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
-    return new RSRawEncoderLegacy(coderOptions);
-  }
-
-  @Override
-  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
-    return new RSRawDecoderLegacy(coderOptions);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 954eb34..6fa70fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -677,7 +677,7 @@
 
 <property>
   <name>io.erasurecode.codec.rs-legacy.rawcoders</name>
-  <value>org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy</value>
+  <value>org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawErasureCoderFactory</value>
   <description>
     Comma separated raw coder implementations for the rs-legacy codec. The earlier
     factory is prior to followings in case of failure of creating raw coders.
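
The description above means the value may list several factory class names, comma separated; earlier entries are preferred and later ones act as fallbacks if an earlier factory fails to create a raw coder. For callers configuring this programmatically rather than through core-default.xml, the following sketch sets the key and lets CodecUtil resolve the renamed factory (the 6+3 ErasureCoderOptions is an illustrative assumption):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawErasureCoderFactory;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public class RSLegacyConfSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Equivalent to the core-default.xml entry above: point the rs-legacy
    // codec at the renamed factory class.
    conf.set(CodecUtil.IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODERS_KEY,
        RSLegacyRawErasureCoderFactory.class.getCanonicalName());

    // CodecUtil resolves the configured factory by reflection.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
    RawErasureEncoder encoder = CodecUtil.createRawEncoder(conf,
        ErasureCodeConstants.RS_LEGACY_CODEC_NAME, options);
    System.out.println(encoder.getClass().getSimpleName()); // RSLegacyRawEncoder
  }
}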

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
index db31993..b5cada6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.io.erasurecode;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoderLegacy;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoderLegacy;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -62,10 +62,10 @@ public class TestCodecRawCoderMapping {
     // should return default raw coder of rs-legacy codec
     encoder = CodecUtil.createRawEncoder(conf,
         ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSRawEncoderLegacy);
+    Assert.assertTrue(encoder instanceof RSLegacyRawEncoder);
     decoder = CodecUtil.createRawDecoder(conf,
         ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSRawDecoderLegacy);
+    Assert.assertTrue(decoder instanceof RSLegacyRawDecoder);
   }
 
   @Test
@@ -122,10 +122,10 @@ public class TestCodecRawCoderMapping {
     // should return default raw coder of rs-legacy codec
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(
             conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSRawEncoderLegacy);
+    Assert.assertTrue(encoder instanceof RSLegacyRawEncoder);
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
             conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSRawDecoderLegacy);
+    Assert.assertTrue(decoder instanceof RSLegacyRawDecoder);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
index 4492e2f..bb4a1f0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
@@ -60,7 +60,7 @@ public final class RawErasureCoderBenchmark {
   private static final List<RawErasureCoderFactory> CODER_MAKERS =
       Collections.unmodifiableList(
           Arrays.asList(new DummyRawErasureCoderFactory(),
-              new RSRawErasureCoderFactoryLegacy(),
+              new RSLegacyRawErasureCoderFactory(),
               new RSRawErasureCoderFactory(),
               new NativeRSRawErasureCoderFactory()));
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
index 5be9b4e..6f5871c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
@@ -29,8 +29,8 @@ import java.nio.ByteBuffer;
 public class TestDummyRawCoder extends TestRawCoderBase {
   @Before
   public void setup() {
-    encoderClass = DummyRawEncoder.class;
-    decoderClass = DummyRawDecoder.class;
+    encoderFactoryClass = DummyRawErasureCoderFactory.class;
+    decoderFactoryClass = DummyRawErasureCoderFactory.class;
     setAllowDump(false);
     setChunkSize(baseChunkSize);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
index edbb9df..e3536d8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
@@ -30,8 +30,8 @@ public class TestNativeRSRawCoder extends TestRSRawCoderBase {
   @Before
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
-    this.encoderClass = NativeRSRawEncoder.class;
-    this.decoderClass = NativeRSRawDecoder.class;
+    this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class;
+    this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java
index ba4b7b4..5adefbe 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java
@@ -29,8 +29,8 @@ public class TestNativeXORRawCoder extends TestXORRawCoderBase {
   @Before
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
-    this.encoderClass = NativeXORRawEncoder.class;
-    this.decoderClass = NativeXORRawDecoder.class;
+    this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class;
+    this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java
new file mode 100644
index 0000000..c01aed9
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.junit.Before;
+
+/**
+ * Test the legacy raw Reed-Solomon coder implemented in Java.
+ */
+public class TestRSLegacyRawCoder extends TestRSRawCoderBase {
+
+  @Before
+  public void setup() {
+    this.encoderFactoryClass = RSLegacyRawErasureCoderFactory.class;
+    this.decoderFactoryClass = RSLegacyRawErasureCoderFactory.class;
+    setAllowDump(false); // Change to true to allow verbose dump for debugging
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
index 5216b9b..c613ee1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
@@ -26,8 +26,8 @@ public class TestRSRawCoder extends TestRSRawCoderBase {
 
   @Before
   public void setup() {
-    this.encoderClass = RSRawEncoder.class;
-    this.decoderClass = RSRawDecoder.class;
+    this.encoderFactoryClass = RSRawErasureCoderFactory.class;
+    this.decoderFactoryClass = RSRawErasureCoderFactory.class;
     setAllowDump(false);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
index 9ce041c..c39c4e0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
@@ -30,8 +30,8 @@ public class TestRSRawCoderInteroperable1 extends TestRSRawCoderBase {
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
 
-    this.encoderClass = RSRawEncoder.class;
-    this.decoderClass = NativeRSRawDecoder.class;
+    this.encoderFactoryClass = RSRawErasureCoderFactory.class;
+    this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
index 15531f3..3c97521 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
@@ -30,8 +30,8 @@ public class TestRSRawCoderInteroperable2 extends TestRSRawCoderBase {
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
 
-    this.encoderClass = NativeRSRawEncoder.class;
-    this.decoderClass = RSRawDecoder.class;
+    this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class;
+    this.decoderFactoryClass = RSRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderLegacy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderLegacy.java
deleted file mode 100644
index ef58e43..0000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderLegacy.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.junit.Before;
-
-/**
- * Test the legacy raw Reed-solomon coder implemented in Java.
- */
-public class TestRSRawCoderLegacy extends TestRSRawCoderBase {
-
-  @Before
-  public void setup() {
-    this.encoderClass = RSRawEncoderLegacy.class;
-    this.decoderClass = RSRawDecoderLegacy.class;
-    setAllowDump(false); // Change to true to allow verbose dump for debugging
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
index 32f0e00..01c743a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
@@ -23,14 +23,12 @@ import org.apache.hadoop.io.erasurecode.TestCoderBase;
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.lang.reflect.Constructor;
-
 /**
  * Raw coder test base with utilities.
  */
 public abstract class TestRawCoderBase extends TestCoderBase {
-  protected Class<? extends RawErasureEncoder> encoderClass;
-  protected Class<? extends RawErasureDecoder> decoderClass;
+  protected Class<? extends RawErasureCoderFactory> encoderFactoryClass;
+  protected Class<? extends RawErasureCoderFactory> decoderFactoryClass;
   protected RawErasureEncoder encoder;
   protected RawErasureDecoder decoder;
 
@@ -234,9 +232,8 @@ public abstract class TestRawCoderBase extends TestCoderBase {
         new ErasureCoderOptions(numDataUnits, numParityUnits,
             allowChangeInputs, allowDump);
     try {
-      Constructor<? extends RawErasureEncoder> constructor =
-          encoderClass.getConstructor(ErasureCoderOptions.class);
-      return constructor.newInstance(coderConf);
+      RawErasureCoderFactory factory = encoderFactoryClass.newInstance();
+      return factory.createEncoder(coderConf);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create encoder", e);
     }
@@ -251,9 +248,8 @@ public abstract class TestRawCoderBase extends TestCoderBase {
         new ErasureCoderOptions(numDataUnits, numParityUnits,
             allowChangeInputs, allowDump);
     try {
-      Constructor<? extends RawErasureDecoder> constructor =
-          decoderClass.getConstructor(ErasureCoderOptions.class);
-      return constructor.newInstance(coderConf);
+      RawErasureCoderFactory factory = decoderFactoryClass.newInstance();
+      return factory.createDecoder(coderConf);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create decoder", e);
     }
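
(Not part of the patch.) The hunk above is the heart of this change for the tests:
instead of reflecting on a coder constructor, the base class now instantiates a
RawErasureCoderFactory and asks it for the encoder or decoder. As a rough,
illustrative sketch only, the same factory API could be exercised directly as
below; the class name FactoryUsageSketch and the 6+3 data/parity counts are
made-up example values, not something this commit adds.

    import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

    public class FactoryUsageSketch {
      public static void main(String[] args) {
        // Example schema: 6 data units + 3 parity units.
        ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
        // Any RawErasureCoderFactory implementation can be plugged in here,
        // e.g. the pure-Java RS factory used by TestRSRawCoder above.
        RawErasureCoderFactory factory = new RSRawErasureCoderFactory();
        RawErasureEncoder encoder = factory.createEncoder(options);
        RawErasureDecoder decoder = factory.createDecoder(options);
        // encoder.encode(...) / decoder.decode(...) then operate on the
        // byte[][] or ByteBuffer[] chunks prepared by the test base classes.
      }
    }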

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
index aae3a04..b29cd4c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
@@ -26,7 +26,7 @@ public class TestXORRawCoder extends TestXORRawCoderBase {
 
   @Before
   public void setup() {
-    this.encoderClass = XORRawEncoder.class;
-    this.decoderClass = XORRawDecoder.class;
+    this.encoderFactoryClass = XORRawErasureCoderFactory.class;
+    this.decoderFactoryClass = XORRawErasureCoderFactory.class;
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java
index c5a809c..5238a86 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java
@@ -29,8 +29,8 @@ public class TestXORRawCoderInteroperable1 extends TestXORRawCoderBase {
   @Before
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
-    this.encoderClass = XORRawEncoder.class;
-    this.decoderClass = NativeXORRawDecoder.class;
+    this.encoderFactoryClass = XORRawErasureCoderFactory.class;
+    this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a22fe02f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java
index 5345eb6..b835107 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java
@@ -29,8 +29,8 @@ public class TestXORRawCoderInteroperable2 extends TestXORRawCoderBase {
   @Before
   public void setup() {
     Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
-    this.encoderClass = NativeXORRawEncoder.class;
-    this.decoderClass = XORRawDecoder.class;
+    this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class;
+    this.decoderFactoryClass = XORRawErasureCoderFactory.class;
     setAllowDump(true);
   }
 



