hadoop-common-commits mailing list archives

From: wan...@apache.org
Subject: [07/50] [abbrv] hadoop git commit: HADOOP-12826. Rename the new Java coder and make it default. Contributed by Rui Li.
Date: Mon, 21 Mar 2016 20:14:20 GMT
HADOOP-12826. Rename the new Java coder and make it default. Contributed by Rui Li.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/19e8f076
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/19e8f076
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/19e8f076

Branch: refs/heads/YARN-3368
Commit: 19e8f076919932b17f24ec4090df1926677651e7
Parents: 1898810
Author: Zhe Zhang <zezhang@zezhang-ld1.linkedin.biz>
Authored: Mon Mar 14 16:45:32 2016 -0700
Committer: Zhe Zhang <zezhang@zezhang-ld1.linkedin.biz>
Committed: Mon Mar 14 16:45:32 2016 -0700

----------------------------------------------------------------------
 .../apache/hadoop/io/erasurecode/CodecUtil.java |  13 +-
 .../io/erasurecode/rawcoder/RSRawDecoder.java   | 175 ++++++++++++++++++
 .../io/erasurecode/rawcoder/RSRawDecoder2.java  | 176 -------------------
 .../io/erasurecode/rawcoder/RSRawEncoder.java   |  75 ++++++++
 .../io/erasurecode/rawcoder/RSRawEncoder2.java  |  76 --------
 .../rawcoder/RSRawErasureCoderFactory.java      |  37 ++++
 .../rawcoder/RSRawErasureCoderFactory2.java     |  37 ----
 .../io/erasurecode/rawcoder/util/RSUtil.java    | 149 +++++++++++++++-
 .../io/erasurecode/rawcoder/util/RSUtil2.java   | 172 ------------------
 .../coder/TestHHXORErasureCoder.java            |   4 +-
 .../erasurecode/coder/TestRSErasureCoder.java   |   8 +-
 .../io/erasurecode/rawcoder/TestRSRawCoder.java |  33 ++++
 .../erasurecode/rawcoder/TestRSRawCoder2.java   |  33 ----
 13 files changed, 483 insertions(+), 505 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
index fd5bd67..a2354b6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
@@ -20,7 +20,14 @@ package org.apache.hadoop.io.erasurecode;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.io.erasurecode.rawcoder.*;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.XORRawEncoder;
 
 /**
  * A codec & coder utility to help create raw coders conveniently.
@@ -43,7 +50,7 @@ public final class CodecUtil {
         CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
         true, numDataUnits, numParityUnits);
     if (rawCoder == null) {
-      rawCoder = new RSRawEncoderLegacy(numDataUnits, numParityUnits);
+      rawCoder = new RSRawEncoder(numDataUnits, numParityUnits);
     }
 
     return (RawErasureEncoder) rawCoder;
@@ -62,7 +69,7 @@ public final class CodecUtil {
         CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
         false, numDataUnits, numParityUnits);
     if (rawCoder == null) {
-      rawCoder = new RSRawDecoderLegacy(numDataUnits, numParityUnits);
+      rawCoder = new RSRawDecoder(numDataUnits, numParityUnits);
     }
 
     return (RawErasureDecoder) rawCoder;
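
For context, a minimal configuration sketch (not part of this commit; the wrapper class name is made up): with the key left unset, the fallback above now selects the renamed RSRawEncoder/RSRawDecoder, while setting the key pins a specific factory, exactly as the updated tests later in this diff do.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;

    public class RsCoderConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Optional: pin the raw RS coder factory explicitly. Leaving this key
        // unset now falls back to the new Java coder, per CodecUtil above.
        conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
            RSRawErasureCoderFactory.class.getCanonicalName());
        System.out.println("rs rawcoder factory = " +
            conf.get(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY));
      }
    }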

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
new file mode 100644
index 0000000..5b9e0e9
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.CoderUtil;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.GF256;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+/**
+ * A raw erasure decoder in the RS code scheme, in pure Java, for use when a
+ * native coder isn't available. Always prefer native implementations when
+ * possible. This new Java coder is about 5X faster than the one originating
+ * from HDFS-RAID, and is also compatible with the native/ISA-L coder.
+ */
+@InterfaceAudience.Private
+public class RSRawDecoder extends AbstractRawErasureDecoder {
+  //relevant to schema and won't change during decode calls
+  private byte[] encodeMatrix;
+
+  /**
+   * Below are relevant to schema and erased indexes, thus may change during
+   * decode calls.
+   */
+  private byte[] decodeMatrix;
+  private byte[] invertMatrix;
+  /**
+   * Array of input tables generated from coding coefficients previously.
+   * Must be of size 32*k*rows
+   */
+  private byte[] gfTables;
+  private int[] cachedErasedIndexes;
+  private int[] validIndexes;
+  private int numErasedDataUnits;
+  private boolean[] erasureFlags;
+
+  public RSRawDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
+      throw new HadoopIllegalArgumentException(
+              "Invalid numDataUnits and numParityUnits");
+    }
+
+    int numAllUnits = getNumDataUnits() + numParityUnits;
+    encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
+    RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
+    if (isAllowingVerboseDump()) {
+      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
+    }
+  }
+
+  @Override
+  protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
+                          ByteBuffer[] outputs) {
+    prepareDecoding(inputs, erasedIndexes);
+
+    ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
+    for (int i = 0; i < getNumDataUnits(); i++) {
+      realInputs[i] = inputs[validIndexes[i]];
+    }
+    RSUtil.encodeData(gfTables, realInputs, outputs);
+  }
+
+  @Override
+  protected void doDecode(byte[][] inputs, int[] inputOffsets,
+                          int dataLen, int[] erasedIndexes,
+                          byte[][] outputs, int[] outputOffsets) {
+    prepareDecoding(inputs, erasedIndexes);
+
+    byte[][] realInputs = new byte[getNumDataUnits()][];
+    int[] realInputOffsets = new int[getNumDataUnits()];
+    for (int i = 0; i < getNumDataUnits(); i++) {
+      realInputs[i] = inputs[validIndexes[i]];
+      realInputOffsets[i] = inputOffsets[validIndexes[i]];
+    }
+    RSUtil.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
+            outputs, outputOffsets);
+  }
+
+  private <T> void prepareDecoding(T[] inputs, int[] erasedIndexes) {
+    int[] tmpValidIndexes = new int[getNumDataUnits()];
+    CoderUtil.makeValidIndexes(inputs, tmpValidIndexes);
+    if (Arrays.equals(this.cachedErasedIndexes, erasedIndexes) &&
+        Arrays.equals(this.validIndexes, tmpValidIndexes)) {
+      return; // Optimization. Nothing to do
+    }
+    this.cachedErasedIndexes =
+            Arrays.copyOf(erasedIndexes, erasedIndexes.length);
+    this.validIndexes =
+            Arrays.copyOf(tmpValidIndexes, tmpValidIndexes.length);
+
+    processErasures(erasedIndexes);
+  }
+
+  private void processErasures(int[] erasedIndexes) {
+    this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
+    this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
+    this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
+
+    this.erasureFlags = new boolean[getNumAllUnits()];
+    this.numErasedDataUnits = 0;
+
+    for (int i = 0; i < erasedIndexes.length; i++) {
+      int index = erasedIndexes[i];
+      erasureFlags[index] = true;
+      if (index < getNumDataUnits()) {
+        numErasedDataUnits++;
+      }
+    }
+
+    generateDecodeMatrix(erasedIndexes);
+
+    RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
+        decodeMatrix, 0, gfTables);
+    if (isAllowingVerboseDump()) {
+      System.out.println(DumpUtil.bytesToHex(gfTables, -1));
+    }
+  }
+
+  // Generate decode matrix from encode matrix
+  private void generateDecodeMatrix(int[] erasedIndexes) {
+    int i, j, r, p;
+    byte s;
+    byte[] tmpMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
+
+    // Construct matrix tmpMatrix by removing error rows
+    for (i = 0; i < getNumDataUnits(); i++) {
+      r = validIndexes[i];
+      for (j = 0; j < getNumDataUnits(); j++) {
+        tmpMatrix[getNumDataUnits() * i + j] =
+                encodeMatrix[getNumDataUnits() * r + j];
+      }
+    }
+
+    GF256.gfInvertMatrix(tmpMatrix, invertMatrix, getNumDataUnits());
+
+    for (i = 0; i < numErasedDataUnits; i++) {
+      for (j = 0; j < getNumDataUnits(); j++) {
+        decodeMatrix[getNumDataUnits() * i + j] =
+                invertMatrix[getNumDataUnits() * erasedIndexes[i] + j];
+      }
+    }
+
+    for (p = numErasedDataUnits; p < erasedIndexes.length; p++) {
+      for (i = 0; i < getNumDataUnits(); i++) {
+        s = 0;
+        for (j = 0; j < getNumDataUnits(); j++) {
+          s ^= GF256.gfMul(invertMatrix[j * getNumDataUnits() + i],
+                  encodeMatrix[getNumDataUnits() * erasedIndexes[p] + j]);
+        }
+        decodeMatrix[getNumDataUnits() * p + i] = s;
+      }
+    }
+  }
+}
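
The constructor above caps the schema so that numDataUnits + numParityUnits stays below the GF(2^8) field size of 256. A tiny sketch of that boundary, with made-up schema sizes and wrapper class name (not from this commit):

    import org.apache.hadoop.HadoopIllegalArgumentException;
    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;

    public class DecoderSchemaSketch {
      public static void main(String[] args) {
        // 6 data + 3 parity = 9 total units: well under the field size of 256.
        RSRawDecoder ok = new RSRawDecoder(6, 3);
        System.out.println("accepted: " + ok.getClass().getSimpleName());

        try {
          // 200 + 56 = 256 total units reaches the field size, so it is rejected.
          new RSRawDecoder(200, 56);
        } catch (HadoopIllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }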

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder2.java
deleted file mode 100644
index 48a3579..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder2.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.CoderUtil;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.GF256;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil2;
-
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-/**
- * A raw erasure decoder in RS code scheme in pure Java in case native one
- * isn't available in some environment. Please always use native implementations
- * when possible. This new Java coder is about 5X faster than the one originated
- * from HDFS-RAID, and also compatible with the native/ISA-L coder.
- */
-@InterfaceAudience.Private
-public class RSRawDecoder2 extends AbstractRawErasureDecoder {
-  //relevant to schema and won't change during decode calls
-  private byte[] encodeMatrix;
-
-  /**
-   * Below are relevant to schema and erased indexes, thus may change during
-   * decode calls.
-   */
-  private byte[] decodeMatrix;
-  private byte[] invertMatrix;
-  /**
-   * Array of input tables generated from coding coefficients previously.
-   * Must be of size 32*k*rows
-   */
-  private byte[] gfTables;
-  private int[] cachedErasedIndexes;
-  private int[] validIndexes;
-  private int numErasedDataUnits;
-  private boolean[] erasureFlags;
-
-  public RSRawDecoder2(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
-      throw new HadoopIllegalArgumentException(
-              "Invalid getNumDataUnits() and numParityUnits");
-    }
-
-    int numAllUnits = getNumDataUnits() + numParityUnits;
-    encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
-    RSUtil2.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
-    if (isAllowingVerboseDump()) {
-      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
-    }
-  }
-
-  @Override
-  protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
-                          ByteBuffer[] outputs) {
-    prepareDecoding(inputs, erasedIndexes);
-
-    ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
-    for (int i = 0; i < getNumDataUnits(); i++) {
-      realInputs[i] = inputs[validIndexes[i]];
-    }
-    RSUtil2.encodeData(gfTables, realInputs, outputs);
-  }
-
-  @Override
-  protected void doDecode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, int[] erasedIndexes,
-                          byte[][] outputs, int[] outputOffsets) {
-    prepareDecoding(inputs, erasedIndexes);
-
-    byte[][] realInputs = new byte[getNumDataUnits()][];
-    int[] realInputOffsets = new int[getNumDataUnits()];
-    for (int i = 0; i < getNumDataUnits(); i++) {
-      realInputs[i] = inputs[validIndexes[i]];
-      realInputOffsets[i] = inputOffsets[validIndexes[i]];
-    }
-    RSUtil2.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
-            outputs, outputOffsets);
-  }
-
-  private <T> void prepareDecoding(T[] inputs, int[] erasedIndexes) {
-    int[] tmpValidIndexes = new int[getNumDataUnits()];
-    CoderUtil.makeValidIndexes(inputs, tmpValidIndexes);
-    if (Arrays.equals(this.cachedErasedIndexes, erasedIndexes) &&
-        Arrays.equals(this.validIndexes, tmpValidIndexes)) {
-      return; // Optimization. Nothing to do
-    }
-    this.cachedErasedIndexes =
-            Arrays.copyOf(erasedIndexes, erasedIndexes.length);
-    this.validIndexes =
-            Arrays.copyOf(tmpValidIndexes, tmpValidIndexes.length);
-
-    processErasures(erasedIndexes);
-  }
-
-  private void processErasures(int[] erasedIndexes) {
-    this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
-    this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
-    this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
-
-    this.erasureFlags = new boolean[getNumAllUnits()];
-    this.numErasedDataUnits = 0;
-
-    for (int i = 0; i < erasedIndexes.length; i++) {
-      int index = erasedIndexes[i];
-      erasureFlags[index] = true;
-      if (index < getNumDataUnits()) {
-        numErasedDataUnits++;
-      }
-    }
-
-    generateDecodeMatrix(erasedIndexes);
-
-    RSUtil2.initTables(getNumDataUnits(), erasedIndexes.length,
-        decodeMatrix, 0, gfTables);
-    if (isAllowingVerboseDump()) {
-      System.out.println(DumpUtil.bytesToHex(gfTables, -1));
-    }
-  }
-
-  // Generate decode matrix from encode matrix
-  private void generateDecodeMatrix(int[] erasedIndexes) {
-    int i, j, r, p;
-    byte s;
-    byte[] tmpMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
-
-    // Construct matrix tmpMatrix by removing error rows
-    for (i = 0; i < getNumDataUnits(); i++) {
-      r = validIndexes[i];
-      for (j = 0; j < getNumDataUnits(); j++) {
-        tmpMatrix[getNumDataUnits() * i + j] =
-                encodeMatrix[getNumDataUnits() * r + j];
-      }
-    }
-
-    GF256.gfInvertMatrix(tmpMatrix, invertMatrix, getNumDataUnits());
-
-    for (i = 0; i < numErasedDataUnits; i++) {
-      for (j = 0; j < getNumDataUnits(); j++) {
-        decodeMatrix[getNumDataUnits() * i + j] =
-                invertMatrix[getNumDataUnits() * erasedIndexes[i] + j];
-      }
-    }
-
-    for (p = numErasedDataUnits; p < erasedIndexes.length; p++) {
-      for (i = 0; i < getNumDataUnits(); i++) {
-        s = 0;
-        for (j = 0; j < getNumDataUnits(); j++) {
-          s ^= GF256.gfMul(invertMatrix[j * getNumDataUnits() + i],
-                  encodeMatrix[getNumDataUnits() * erasedIndexes[p] + j]);
-        }
-        decodeMatrix[getNumDataUnits() * p + i] = s;
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
new file mode 100644
index 0000000..cee6574
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A raw erasure encoder in the RS code scheme, in pure Java, for use when a
+ * native coder isn't available. Always prefer native implementations when
+ * possible. This new Java coder is about 5X faster than the one originating
+ * from HDFS-RAID, and is also compatible with the native/ISA-L coder.
+ */
+@InterfaceAudience.Private
+public class RSRawEncoder extends AbstractRawErasureEncoder {
+  // relevant to schema and won't change during encode calls.
+  private byte[] encodeMatrix;
+  /**
+   * Array of input tables generated from coding coefficients previously.
+   * Must be of size 32*k*rows
+   */
+  private byte[] gfTables;
+
+  public RSRawEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+
+    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
+      throw new HadoopIllegalArgumentException(
+          "Invalid numDataUnits and numParityUnits");
+    }
+
+    encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
+    RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
+    if (isAllowingVerboseDump()) {
+      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
+    }
+    gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
+    RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
+        numDataUnits * numDataUnits, gfTables);
+    if (isAllowingVerboseDump()) {
+      System.out.println(DumpUtil.bytesToHex(gfTables, -1));
+    }
+  }
+
+  @Override
+  protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
+    RSUtil.encodeData(gfTables, inputs, outputs);
+  }
+
+  @Override
+  protected void doEncode(byte[][] inputs, int[] inputOffsets,
+                          int dataLen, byte[][] outputs, int[] outputOffsets) {
+    RSUtil.encodeData(gfTables, dataLen, inputs, inputOffsets, outputs,
+        outputOffsets);
+  }
+}
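
A round-trip sketch tying the renamed encoder and decoder together. It assumes the byte[][] encode/decode entry points inherited from the Abstract* base classes (not shown in this diff) and the usual input layout of data cells followed by parity cells, with null marking erased positions; the wrapper class name, schema, and cell size are illustrative only.

    import java.util.Arrays;
    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;

    public class RsRoundTripSketch {
      public static void main(String[] args) {
        int numData = 6, numParity = 3, cellSize = 1024;

        byte[][] data = new byte[numData][cellSize];
        for (byte[] cell : data) {
          Arrays.fill(cell, (byte) 7);                 // placeholder payload
        }
        byte[][] parity = new byte[numParity][cellSize];
        new RSRawEncoder(numData, numParity).encode(data, parity);

        // Simulate losing data cell 0 and parity cell 1 (index numData + 1).
        byte[][] inputs = new byte[numData + numParity][];
        System.arraycopy(data, 0, inputs, 0, numData);
        System.arraycopy(parity, 0, inputs, numData, numParity);
        int[] erased = {0, numData + 1};
        for (int idx : erased) {
          inputs[idx] = null;
        }

        byte[][] recovered = new byte[erased.length][cellSize];
        new RSRawDecoder(numData, numParity).decode(inputs, erased, recovered);
        System.out.println("data cell 0 recovered: "
            + Arrays.equals(recovered[0], data[0]));
      }
    }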

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder2.java
deleted file mode 100644
index 72d77f7..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder2.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil2;
-
-import java.nio.ByteBuffer;
-
-/**
- * A raw erasure encoder in RS code scheme in pure Java in case native one
- * isn't available in some environment. Please always use native implementations
- * when possible. This new Java coder is about 5X faster than the one originated
- * from HDFS-RAID, and also compatible with the native/ISA-L coder.
- */
-@InterfaceAudience.Private
-public class RSRawEncoder2 extends AbstractRawErasureEncoder {
-  // relevant to schema and won't change during encode calls.
-  private byte[] encodeMatrix;
-  /**
-   * Array of input tables generated from coding coefficients previously.
-   * Must be of size 32*k*rows
-   */
-  private byte[] gfTables;
-
-  public RSRawEncoder2(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-
-    if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
-      throw new HadoopIllegalArgumentException(
-          "Invalid numDataUnits and numParityUnits");
-    }
-
-    encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
-    RSUtil2.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
-    if (isAllowingVerboseDump()) {
-      DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
-    }
-    gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
-    RSUtil2.initTables(numDataUnits, numParityUnits, encodeMatrix,
-        numDataUnits * numDataUnits, gfTables);
-    if (isAllowingVerboseDump()) {
-      System.out.println(DumpUtil.bytesToHex(gfTables, -1));
-    }
-  }
-
-  @Override
-  protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
-    RSUtil2.encodeData(gfTables, inputs, outputs);
-  }
-
-  @Override
-  protected void doEncode(byte[][] inputs, int[] inputOffsets,
-                          int dataLen, byte[][] outputs, int[] outputOffsets) {
-    RSUtil2.encodeData(gfTables, dataLen, inputs, inputOffsets, outputs,
-        outputOffsets);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
new file mode 100644
index 0000000..b38db4b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * A raw coder factory for the new raw Reed-Solomon coder in Java.
+ */
+@InterfaceAudience.Private
+public class RSRawErasureCoderFactory implements RawErasureCoderFactory {
+
+  @Override
+  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
+    return new RSRawEncoder(numDataUnits, numParityUnits);
+  }
+
+  @Override
+  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
+    return new RSRawDecoder(numDataUnits, numParityUnits);
+  }
+}
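
A quick usage sketch (not from this commit) of the renamed factory producing the renamed coders through the RawErasureCoderFactory interface it implements; the wrapper class name and the 10+4 schema are illustrative only.

    import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

    public class FactorySketch {
      public static void main(String[] args) {
        RawErasureCoderFactory factory = new RSRawErasureCoderFactory();
        RawErasureEncoder encoder = factory.createEncoder(10, 4);  // -> RSRawEncoder
        RawErasureDecoder decoder = factory.createDecoder(10, 4);  // -> RSRawDecoder
        System.out.println(encoder.getClass().getSimpleName() + " / "
            + decoder.getClass().getSimpleName());
      }
    }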

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory2.java
deleted file mode 100644
index 40a0f1c..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory2.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-
-/**
- * A raw coder factory for raw Reed-Solomon coder in Java.
- */
-@InterfaceAudience.Private
-public class RSRawErasureCoderFactory2 implements RawErasureCoderFactory {
-
-  @Override
-  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
-    return new RSRawEncoder2(numDataUnits, numParityUnits);
-  }
-
-  @Override
-  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
-    return new RSRawDecoder2(numDataUnits, numParityUnits);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
index a3b0e39..43823d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
@@ -19,11 +19,16 @@ package org.apache.hadoop.io.erasurecode.rawcoder.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
+import java.nio.ByteBuffer;
+
 /**
- * Utilities for implementing Reed-Solomon code, used by RS coder.
+ * Utilities for implementing the Reed-Solomon code, used by the RS coder. Some
+ * of the code is borrowed from the ISA-L implementation (C or ASM code).
  */
 @InterfaceAudience.Private
-public class RSUtil {
+public final class RSUtil {
+
+  private RSUtil(){}
 
   // We always use the byte system (with symbol size 8, field size 256,
   // primitive polynomial 285, and primitive root 2).
@@ -39,4 +44,144 @@ public class RSUtil {
     return primitivePower;
   }
 
+  public static void initTables(int k, int rows, byte[] codingMatrix,
+      int matrixOffset, byte[] gfTables) {
+    int i, j;
+
+    int offset = 0, idx = matrixOffset;
+    for (i = 0; i < rows; i++) {
+      for (j = 0; j < k; j++) {
+        GF256.gfVectMulInit(codingMatrix[idx++], gfTables, offset);
+        offset += 32;
+      }
+    }
+  }
+
+  /**
+   * Ported from Intel ISA-L library.
+   */
+  public static void genCauchyMatrix(byte[] a, int m, int k) {
+    // Identity matrix in high position
+    for (int i = 0; i < k; i++) {
+      a[k * i + i] = 1;
+    }
+
+    // For the rest choose 1/(i + j) | i != j
+    int pos = k * k;
+    for (int i = k; i < m; i++) {
+      for (int j = 0; j < k; j++) {
+        a[pos++] = GF256.gfInv((byte) (i ^ j));
+      }
+    }
+  }
+
+  /**
+   * Encode a group of input data units and generate the outputs. It's also
+   * used for decoding because, in this implementation, encoding and decoding
+   * are unified.
+   *
+   * The algorithm is ported from the Intel ISA-L library for compatibility. It
+   * leverages Java auto-vectorization support for performance.
+   */
+  public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs,
+      int[] inputOffsets, byte[][] outputs,
+      int[] outputOffsets) {
+    int numInputs = inputs.length;
+    int numOutputs = outputs.length;
+    int l, i, j, iPos, oPos;
+    byte[] input, output;
+    byte s;
+    final int times = dataLen / 8;
+    final int extra = dataLen - dataLen % 8;
+    byte[] tableLine;
+
+    for (l = 0; l < numOutputs; l++) {
+      output = outputs[l];
+
+      for (j = 0; j < numInputs; j++) {
+        input = inputs[j];
+        iPos = inputOffsets[j];
+        oPos = outputOffsets[l];
+
+        s = gfTables[j * 32 + l * numInputs * 32 + 1];
+        tableLine = GF256.gfMulTab()[s & 0xff];
+
+        /**
+         * Purely for performance, assuming we can use 8 bytes in the SIMD
+         * instruction. Subject to be improved.
+         */
+        for (i = 0; i < times; i++, iPos += 8, oPos += 8) {
+          output[oPos + 0] ^= tableLine[0xff & input[iPos + 0]];
+          output[oPos + 1] ^= tableLine[0xff & input[iPos + 1]];
+          output[oPos + 2] ^= tableLine[0xff & input[iPos + 2]];
+          output[oPos + 3] ^= tableLine[0xff & input[iPos + 3]];
+          output[oPos + 4] ^= tableLine[0xff & input[iPos + 4]];
+          output[oPos + 5] ^= tableLine[0xff & input[iPos + 5]];
+          output[oPos + 6] ^= tableLine[0xff & input[iPos + 6]];
+          output[oPos + 7] ^= tableLine[0xff & input[iPos + 7]];
+        }
+
+        /**
+         * For the leftover bytes, do it one by one.
+         */
+        for (i = extra; i < dataLen; i++, iPos++, oPos++) {
+          output[oPos] ^= tableLine[0xff & input[iPos]];
+        }
+      }
+    }
+  }
+
+  /**
+   * See above. Try to use the byte[] version when possible.
+   */
+  public static void encodeData(byte[] gfTables, ByteBuffer[] inputs,
+      ByteBuffer[] outputs) {
+    int numInputs = inputs.length;
+    int numOutputs = outputs.length;
+    int dataLen = inputs[0].remaining();
+    int l, i, j, iPos, oPos;
+    ByteBuffer input, output;
+    byte s;
+    final int times = dataLen / 8;
+    final int extra = dataLen - dataLen % 8;
+    byte[] tableLine;
+
+    for (l = 0; l < numOutputs; l++) {
+      output = outputs[l];
+
+      for (j = 0; j < numInputs; j++) {
+        input = inputs[j];
+        iPos = input.position();
+        oPos = output.position();
+
+        s = gfTables[j * 32 + l * numInputs * 32 + 1];
+        tableLine = GF256.gfMulTab()[s & 0xff];
+
+        for (i = 0; i < times; i++, iPos += 8, oPos += 8) {
+          output.put(oPos + 0, (byte) (output.get(oPos + 0) ^
+              tableLine[0xff & input.get(iPos + 0)]));
+          output.put(oPos + 1, (byte) (output.get(oPos + 1) ^
+              tableLine[0xff & input.get(iPos + 1)]));
+          output.put(oPos + 2, (byte) (output.get(oPos + 2) ^
+              tableLine[0xff & input.get(iPos + 2)]));
+          output.put(oPos + 3, (byte) (output.get(oPos + 3) ^
+              tableLine[0xff & input.get(iPos + 3)]));
+          output.put(oPos + 4, (byte) (output.get(oPos + 4) ^
+              tableLine[0xff & input.get(iPos + 4)]));
+          output.put(oPos + 5, (byte) (output.get(oPos + 5) ^
+              tableLine[0xff & input.get(iPos + 5)]));
+          output.put(oPos + 6, (byte) (output.get(oPos + 6) ^
+              tableLine[0xff & input.get(iPos + 6)]));
+          output.put(oPos + 7, (byte) (output.get(oPos + 7) ^
+              tableLine[0xff & input.get(iPos + 7)]));
+        }
+
+        for (i = extra; i < dataLen; i++, iPos++, oPos++) {
+          output.put(oPos, (byte) (output.get(oPos) ^
+              tableLine[0xff & input.get(iPos)]));
+        }
+      }
+    }
+  }
+
 }
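
To make the matrix plumbing above concrete, here is a small sketch (hypothetical 4+2 schema, toy 64-byte cells, made-up class name) that builds the Cauchy encode matrix and expands its parity rows into the 32-bytes-per-coefficient gfTables layout consumed by encodeData; all RSUtil signatures are exactly as added above.

    import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;

    public class RsUtilSketch {
      public static void main(String[] args) {
        int k = 4, m = 2;                        // 4 data units, 2 parity units
        int numAllUnits = k + m;

        // (k + m) x k encode matrix: identity rows on top, Cauchy rows below.
        byte[] encodeMatrix = new byte[numAllUnits * k];
        RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, k);

        // 32 bytes of lookup table per coefficient; the matrix offset k * k
        // skips the identity part, mirroring what RSRawEncoder does.
        byte[] gfTables = new byte[numAllUnits * k * 32];
        RSUtil.initTables(k, m, encodeMatrix, k * k, gfTables);

        byte[][] data = new byte[k][64];         // toy data cells (all zeros)
        byte[][] parity = new byte[m][64];
        RSUtil.encodeData(gfTables, 64, data, new int[k], parity, new int[m]);
        System.out.println("parity computed for the " + k + "+" + m + " schema");
      }
    }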

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil2.java
deleted file mode 100644
index 84121a8..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil2.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder.util;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-
-import java.nio.ByteBuffer;
-
-/**
- * Utilities for implementing Reed-Solomon code, used by RS2 coder. Some of the
- * codes are borrowed from ISA-L implementation (C or ASM codes).
- */
-@InterfaceAudience.Private
-public final class RSUtil2 {
-
-  private RSUtil2() { }
-
-  public static void initTables(int k, int rows, byte[] codingMatrix,
-                                int matrixOffset, byte[] gfTables) {
-    int i, j;
-
-    int offset = 0, idx = matrixOffset;
-    for (i = 0; i < rows; i++) {
-      for (j = 0; j < k; j++) {
-        GF256.gfVectMulInit(codingMatrix[idx++], gfTables, offset);
-        offset += 32;
-      }
-    }
-  }
-
-  /**
-   * Ported from Intel ISA-L library.
-   */
-  public static void genCauchyMatrix(byte[] a, int m, int k) {
-    // Identity matrix in high position
-    for (int i = 0; i < k; i++) {
-      a[k * i + i] = 1;
-    }
-
-    // For the rest choose 1/(i + j) | i != j
-    int pos = k * k;
-    for (int i = k; i < m; i++) {
-      for (int j = 0; j < k; j++) {
-        a[pos++] = GF256.gfInv((byte) (i ^ j));
-      }
-    }
-  }
-
-  /**
-   * Encode a group of inputs data and generate the outputs. It's also used for
-   * decoding because, in this implementation, encoding and decoding are
-   * unified.
-   *
-   * The algorithm is ported from Intel ISA-L library for compatible. It
-   * leverages Java auto-vectorization support for performance.
-   */
-  public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs,
-                                int[] inputOffsets, byte[][] outputs,
-                                int[] outputOffsets) {
-    int numInputs = inputs.length;
-    int numOutputs = outputs.length;
-    int l, i, j, iPos, oPos;
-    byte[] input, output;
-    byte s;
-    final int times = dataLen / 8;
-    final int extra = dataLen - dataLen % 8;
-    byte[] tableLine;
-
-    for (l = 0; l < numOutputs; l++) {
-      output = outputs[l];
-
-      for (j = 0; j < numInputs; j++) {
-        input = inputs[j];
-        iPos = inputOffsets[j];
-        oPos = outputOffsets[l];
-
-        s = gfTables[j * 32 + l * numInputs * 32 + 1];
-        tableLine = GF256.gfMulTab()[s & 0xff];
-
-        /**
-         * Purely for performance, assuming we can use 8 bytes in the SIMD
-         * instruction. Subject to be improved.
-         */
-        for (i = 0; i < times; i++, iPos += 8, oPos += 8) {
-          output[oPos + 0] ^= tableLine[0xff & input[iPos + 0]];
-          output[oPos + 1] ^= tableLine[0xff & input[iPos + 1]];
-          output[oPos + 2] ^= tableLine[0xff & input[iPos + 2]];
-          output[oPos + 3] ^= tableLine[0xff & input[iPos + 3]];
-          output[oPos + 4] ^= tableLine[0xff & input[iPos + 4]];
-          output[oPos + 5] ^= tableLine[0xff & input[iPos + 5]];
-          output[oPos + 6] ^= tableLine[0xff & input[iPos + 6]];
-          output[oPos + 7] ^= tableLine[0xff & input[iPos + 7]];
-        }
-
-        /**
-         * For the left bytes, do it one by one.
-         */
-        for (i = extra; i < dataLen; i++, iPos++, oPos++) {
-          output[oPos] ^= tableLine[0xff & input[iPos]];
-        }
-      }
-    }
-  }
-
-  /**
-   * See above. Try to use the byte[] version when possible.
-   */
-  public static void encodeData(byte[] gfTables, ByteBuffer[] inputs,
-                                ByteBuffer[] outputs) {
-    int numInputs = inputs.length;
-    int numOutputs = outputs.length;
-    int dataLen = inputs[0].remaining();
-    int l, i, j, iPos, oPos;
-    ByteBuffer input, output;
-    byte s;
-    final int times = dataLen / 8;
-    final int extra = dataLen - dataLen % 8;
-    byte[] tableLine;
-
-    for (l = 0; l < numOutputs; l++) {
-      output = outputs[l];
-
-      for (j = 0; j < numInputs; j++) {
-        input = inputs[j];
-        iPos = input.position();
-        oPos = output.position();
-
-        s = gfTables[j * 32 + l * numInputs * 32 + 1];
-        tableLine = GF256.gfMulTab()[s & 0xff];
-
-        for (i = 0; i < times; i++, iPos += 8, oPos += 8) {
-          output.put(oPos + 0, (byte) (output.get(oPos + 0) ^
-              tableLine[0xff & input.get(iPos + 0)]));
-          output.put(oPos + 1, (byte) (output.get(oPos + 1) ^
-              tableLine[0xff & input.get(iPos + 1)]));
-          output.put(oPos + 2, (byte) (output.get(oPos + 2) ^
-              tableLine[0xff & input.get(iPos + 2)]));
-          output.put(oPos + 3, (byte) (output.get(oPos + 3) ^
-              tableLine[0xff & input.get(iPos + 3)]));
-          output.put(oPos + 4, (byte) (output.get(oPos + 4) ^
-              tableLine[0xff & input.get(iPos + 4)]));
-          output.put(oPos + 5, (byte) (output.get(oPos + 5) ^
-              tableLine[0xff & input.get(iPos + 5)]));
-          output.put(oPos + 6, (byte) (output.get(oPos + 6) ^
-              tableLine[0xff & input.get(iPos + 6)]));
-          output.put(oPos + 7, (byte) (output.get(oPos + 7) ^
-              tableLine[0xff & input.get(iPos + 7)]));
-        }
-
-        for (i = extra; i < dataLen; i++, iPos++, oPos++) {
-          output.put(oPos, (byte) (output.get(oPos) ^
-              tableLine[0xff & input.get(iPos)]));
-        }
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java
index 6372d48..ad346e0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -51,7 +51,7 @@ public class TestHHXORErasureCoder extends TestHHErasureCoderBase {
      */
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
-        RSRawErasureCoderFactoryLegacy.class.getCanonicalName());
+        RSRawErasureCoderFactory.class.getCanonicalName());
     prepare(conf, 10, 4, new int[]{0}, new int[0]);
 
     testCoding(true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
index 213eee6..ee2348e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -58,20 +58,20 @@ public class TestRSErasureCoder extends TestErasureCoderBase {
      */
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
-        RSRawErasureCoderFactoryLegacy.class.getCanonicalName());
+        RSRawErasureCoderFactory.class.getCanonicalName());
     prepare(conf, 10, 4, new int[]{0}, new int[0]);
 
     testCoding(true);
     testCoding(true);
   }
-  
+
   @Test
   public void testCodingDirectBuffer_10x4_erasing_p1() {
     prepare(null, 10, 4, new int[]{}, new int[]{1});
     testCoding(true);
     testCoding(true);
   }
-  
+
   @Test
   public void testCodingDirectBuffer_10x4_erasing_d2() {
     prepare(null, 10, 4, new int[] {2}, new int[] {});

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
new file mode 100644
index 0000000..5216b9b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.junit.Before;
+
+/**
+ * Test the new raw Reed-solomon coder implemented in Java.
+ */
+public class TestRSRawCoder extends TestRSRawCoderBase {
+
+  @Before
+  public void setup() {
+    this.encoderClass = RSRawEncoder.class;
+    this.decoderClass = RSRawDecoder.class;
+    setAllowDump(false);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19e8f076/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder2.java
deleted file mode 100644
index 3e11d14..0000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder2.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.rawcoder;
-
-import org.junit.Before;
-
-/**
- * Test the new raw Reed-solomon coder implemented in Java.
- */
-public class TestRSRawCoder2 extends TestRSRawCoderBase {
-
-  @Before
-  public void setup() {
-    this.encoderClass = RSRawEncoder2.class;
-    this.decoderClass = RSRawDecoder2.class;
-    setAllowDump(false);
-  }
-}

