From: drankye@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Message-Id: <1314decc339846dcaa1ac98aa8c0786a@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: hadoop git commit: HADOOP-11921. Enhance tests for erasure coders. Contributed by Kai Zheng
Date: Thu, 7 May 2015 01:09:06 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7285 16ba1a508 -> 0f7eb4636


HADOOP-11921. Enhance tests for erasure coders. Contributed by Kai Zheng

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0f7eb463
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0f7eb463
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0f7eb463

Branch: refs/heads/HDFS-7285
Commit: 0f7eb46362752da1972e1b669e67cadef30c5547
Parents: 16ba1a5
Author: Kai Zheng
Authored: Thu May 7 17:05:04 2015 +0800
Committer: Kai Zheng
Committed: Thu May 7 17:05:04 2015 +0800

----------------------------------------------------------------------
 .../hadoop-common/CHANGES-HDFS-EC-7285.txt      |  2 +
 .../hadoop/io/erasurecode/TestCoderBase.java    | 50 ++++++-----
 .../erasurecode/coder/TestErasureCoderBase.java | 89 +++++++++++---------
 .../erasurecode/coder/TestRSErasureCoder.java   | 64 ++++++++++----
 .../io/erasurecode/coder/TestXORCoder.java      | 24 ++++--
 .../io/erasurecode/rawcoder/TestRSRawCoder.java | 76 +++++++++--------
 .../rawcoder/TestRSRawCoderBase.java            | 51 +++++++++++
 .../erasurecode/rawcoder/TestRawCoderBase.java  | 45 +++++-----
 .../erasurecode/rawcoder/TestXORRawCoder.java   | 24 ++++--
 9 files changed, 274 insertions(+), 151 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
index 9749270..7a344a8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES-HDFS-EC-7285.txt
@@ -44,3 +44,5 @@
     HADOOP-11818. Minor improvements for erasurecode classes. (Rakesh R via Kai Zheng)
 
     HADOOP-11841. Remove unused ecschema-def.xml files. (szetszwo)
+
+    HADOOP-11921 Enhance tests for erasure coders. (Kai Zheng)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
index 22fd98d..be1924c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
@@ -49,15 +49,15 @@ public abstract class TestCoderBase {
    * Prepare before running the case.
    * @param numDataUnits
    * @param numParityUnits
-   * @param erasedIndexes
+   * @param erasedDataIndexes
    */
   protected void prepare(Configuration conf, int numDataUnits,
-                         int numParityUnits, int[] erasedIndexes) {
+                         int numParityUnits, int[] erasedDataIndexes) {
     this.conf = conf;
     this.numDataUnits = numDataUnits;
     this.numParityUnits = numParityUnits;
-    this.erasedDataIndexes = erasedIndexes != null ?
-        erasedIndexes : new int[] {0};
+    this.erasedDataIndexes = erasedDataIndexes != null ?
+        erasedDataIndexes : new int[] {0};
   }
 
   /**
@@ -82,15 +82,19 @@
   }
 
   /**
-   * Adjust and return erased indexes based on the array of the input chunks (
-   * parity chunks + data chunks).
-   * @return
+   * Adjust and return erased indexes altogether, including erased data indexes
+   * and parity indexes.
+   * @return erased indexes altogether
    */
   protected int[] getErasedIndexesForDecoding() {
     int[] erasedIndexesForDecoding = new int[erasedDataIndexes.length];
+
+    int idx = 0;
+
     for (int i = 0; i < erasedDataIndexes.length; i++) {
-      erasedIndexesForDecoding[i] = erasedDataIndexes[i] + numParityUnits;
+      erasedIndexesForDecoding[idx ++] = erasedDataIndexes[i] + numParityUnits;
     }
+
     return erasedIndexesForDecoding;
   }
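
For reference, a standalone sketch of the index arithmetic that getErasedIndexesForDecoding() performs: the decode input array is laid out as parity chunks followed by data chunks (per the original javadoc above), so an erased data index is shifted by numParityUnits. This is illustrative only and not part of the patch; plain ints stand in for the test-base fields.

public class ErasedIndexMappingSketch {
  static int[] toDecodingIndexes(int[] erasedDataIndexes, int numParityUnits) {
    int[] result = new int[erasedDataIndexes.length];
    for (int i = 0; i < erasedDataIndexes.length; i++) {
      // data unit i sits after all parity units in the decode input array
      result[i] = erasedDataIndexes[i] + numParityUnits;
    }
    return result;
  }

  public static void main(String[] args) {
    // RS(10,4): erasing data units 2 and 4 -> decode-input indexes 6 and 8
    int[] mapped = toDecodingIndexes(new int[] {2, 4}, 4);
    System.out.println(java.util.Arrays.toString(mapped)); // [6, 8]
  }
}
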
@@ -116,30 +120,23 @@
   }
 
   /**
-   * Have a copy of the data chunks that's to be erased thereafter. The copy
-   * will be used to compare and verify with the to be recovered chunks.
+   * Erase chunks to test the recovering of them. Before erasure clone them
+   * first so could return them.
    * @param dataChunks
-   * @return
+   * @return clone of erased chunks
    */
-  protected ECChunk[] copyDataChunksToErase(ECChunk[] dataChunks) {
-    ECChunk[] copiedChunks = new ECChunk[erasedDataIndexes.length];
-
-    int j = 0;
-    for (int i = 0; i < erasedDataIndexes.length; i++) {
-      copiedChunks[j ++] = cloneChunkWithData(dataChunks[erasedDataIndexes[i]]);
-    }
+  protected ECChunk[] backupAndEraseChunks(ECChunk[] dataChunks) {
+    ECChunk[] toEraseChunks = new ECChunk[erasedDataIndexes.length];
 
-    return copiedChunks;
-  }
+    int idx = 0;
 
-  /**
-   * Erase some data chunks to test the recovering of them
-   * @param dataChunks
-   */
-  protected void eraseSomeDataBlocks(ECChunk[] dataChunks) {
     for (int i = 0; i < erasedDataIndexes.length; i++) {
-      eraseDataFromChunk(dataChunks[erasedDataIndexes[i]]);
+      ECChunk chunk = dataChunks[erasedDataIndexes[i]];
+      toEraseChunks[idx ++] = cloneChunkWithData(chunk);
+      eraseDataFromChunk(chunk);
     }
+
+    return toEraseChunks;
   }
 
   /**
@@ -277,6 +274,7 @@
    */
   protected ECChunk[] prepareOutputChunksForDecoding() {
     ECChunk[] chunks = new ECChunk[erasedDataIndexes.length];
+
     for (int i = 0; i < chunks.length; i++) {
       chunks[i] = allocateOutputChunk();
     }
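
The hunk above folds the separate copy-then-erase helpers into a single backupAndEraseChunks(). A minimal standalone sketch of that pattern, using plain byte arrays instead of ECChunk (the names here are illustrative, not from the patch): clone the chunks that will be erased, wipe the originals in place, and keep the clones as the expected values for later verification.

import java.util.Arrays;

public class BackupAndEraseSketch {
  static byte[][] backupAndErase(byte[][] dataChunks, int[] erasedIndexes) {
    byte[][] backups = new byte[erasedIndexes.length][];
    for (int i = 0; i < erasedIndexes.length; i++) {
      byte[] chunk = dataChunks[erasedIndexes[i]];
      backups[i] = Arrays.copyOf(chunk, chunk.length); // deep copy kept for comparison
      Arrays.fill(chunk, (byte) 0);                    // "erase" the original in place
    }
    return backups;
  }

  public static void main(String[] args) {
    byte[][] data = { {1, 2}, {3, 4}, {5, 6} };
    byte[][] backups = backupAndErase(data, new int[] {1});
    System.out.println(Arrays.toString(backups[0])); // [3, 4]
    System.out.println(Arrays.toString(data[1]));    // [0, 0]
  }
}
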

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java
index 05a62a7..fdd0b50 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java
@@ -29,6 +29,9 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
   protected Class encoderClass;
   protected Class decoderClass;
+  private ErasureCoder encoder;
+  private ErasureCoder decoder;
+
   protected int numChunksInBlock = 16;
 
   /**
@@ -54,39 +57,27 @@
    */
   protected void testCoding(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
-
-    ErasureCoder encoder = createEncoder();
+    prepareCoders();
 
     // Generate data and encode
     ECBlockGroup blockGroup = prepareBlockGroupForEncoding();
 
     // Backup all the source chunks for later recovering because some coders
     // may affect the source data.
-    TestBlock[] clonedDataBlocks = cloneBlocksWithData((TestBlock[])
-        blockGroup.getDataBlocks());
-    // Make a copy of a strip for later comparing
-    TestBlock[] toEraseBlocks = copyDataBlocksToErase(clonedDataBlocks);
+    TestBlock[] clonedDataBlocks = cloneBlocksWithData((TestBlock[]) blockGroup.getDataBlocks());
 
     ErasureCodingStep codingStep;
-    try {
-      codingStep = encoder.calculateCoding(blockGroup);
-      performCodingStep(codingStep);
-    } finally {
-      encoder.release();
-    }
-    // Erase the copied sources
-    eraseSomeDataBlocks(clonedDataBlocks);
+    codingStep = encoder.calculateCoding(blockGroup);
+    performCodingStep(codingStep);
+    // Erase specified sources but return copies of them for later comparing
+    TestBlock[] backupBlocks = backupAndEraseBlocks(clonedDataBlocks);
 
-    //Decode
+    // Decode
     blockGroup = new ECBlockGroup(clonedDataBlocks, blockGroup.getParityBlocks());
-    ErasureCoder decoder = createDecoder();
-    try {
-      codingStep = decoder.calculateCoding(blockGroup);
-      performCodingStep(codingStep);
-    } finally {
-      decoder.release();
-    }
 
-    //Compare
-    compareAndVerify(toEraseBlocks, codingStep.getOutputBlocks());
+    codingStep = decoder.calculateCoding(blockGroup);
+    performCodingStep(codingStep);
+
+    // Compare
+    compareAndVerify(backupBlocks, codingStep.getOutputBlocks());
   }
 
   /**
@@ -129,8 +120,7 @@
   protected void compareAndVerify(ECBlock[] erasedBlocks, ECBlock[] recoveredBlocks) {
     for (int i = 0; i < erasedBlocks.length; ++i) {
-      compareAndVerify(((TestBlock) erasedBlocks[i]).chunks,
-          ((TestBlock) recoveredBlocks[i]).chunks);
+      compareAndVerify(((TestBlock) erasedBlocks[i]).chunks, ((TestBlock) recoveredBlocks[i]).chunks);
     }
   }
 
@@ -151,6 +141,16 @@
     return encoder;
   }
 
+  private void prepareCoders() {
+    if (encoder == null) {
+      encoder = createEncoder();
+    }
+
+    if (decoder == null) {
+      decoder = createDecoder();
+    }
+  }
+
   /**
    * Create the erasure decoder for the test.
    * @return
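
prepareCoders() above lets one encoder/decoder pair survive across repeated testCoding() runs instead of being created and released per run, which is what allows the new tests to exercise coder reuse. A rough, self-contained sketch of that lazy-initialization pattern; the Encoder/Decoder interfaces below are hypothetical stand-ins, not the Hadoop ErasureCoder API.

public class CoderReuseSketch {
  interface Encoder { void encode(byte[][] data, byte[][] parity); }
  interface Decoder { void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs); }

  private Encoder encoder;
  private Decoder decoder;

  // Create the coders lazily, once, so every run below exercises the same instances.
  private void prepareCoders() {
    if (encoder == null) {
      encoder = (data, parity) -> { /* encode data into parity */ };
    }
    if (decoder == null) {
      decoder = (inputs, erasedIndexes, outputs) -> { /* recover erased units */ };
    }
  }

  void testCoding(boolean usingDirectBuffer) {
    prepareCoders(); // reused coders: state left over from a previous run would surface here
    // ... generate data, encode, erase, decode, verify ...
  }
}
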
@@ -202,6 +202,26 @@
   }
 
   /**
+   * Erase blocks to test the recovering of them. Before erasure clone them
+   * first so could return themselves.
+   * @param dataBlocks
+   * @return clone of erased dataBlocks
+   */
+  protected TestBlock[] backupAndEraseBlocks(TestBlock[] dataBlocks) {
+    TestBlock[] toEraseBlocks = new TestBlock[erasedDataIndexes.length];
+
+    int idx = 0;
+
+    for (int i = 0; i < erasedDataIndexes.length; i++) {
+      TestBlock block = dataBlocks[erasedDataIndexes[i]];
+      toEraseBlocks[idx ++] = cloneBlockWithData(block);
+      eraseDataFromBlock(block);
+    }
+
+    return toEraseBlocks;
+  }
+
+  /**
    * Copy those data blocks that's to be erased for later comparing and
    * verifying.
    * @param dataBlocks
@@ -255,22 +275,9 @@
   }
 
   /**
-   * Erase some data blocks specified by the indexes from the data blocks.
-   * @param dataBlocks
-   */
-  protected void eraseSomeDataBlocks(TestBlock[] dataBlocks) {
-    for (int i = 0; i < erasedDataIndexes.length; ++i) {
-      eraseDataFromBlock(dataBlocks, erasedDataIndexes[i]);
-    }
-  }
-
-  /**
-   * Erase data from a block specified by erased index.
-   * @param blocks
-   * @param erasedIndex
+   * Erase data from a block.
    */
-  protected void eraseDataFromBlock(TestBlock[] blocks, int erasedIndex) {
-    TestBlock theBlock = blocks[erasedIndex];
+  protected void eraseDataFromBlock(TestBlock theBlock) {
     eraseDataFromChunks(theBlock.chunks);
     theBlock.setErased(true);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
index 3507dd2..7d9d37a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
@@ -40,19 +40,18 @@
   }
 
   @Test
-  public void testCodingNoDirectBuffer_10x4() {
-    prepare(null, 10, 4, null);
+  public void testCodingNoDirectBuffer_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+    /**
+     * Doing twice to test if the coders can be repeatedly reused. This matters
+     * as the underlying coding buffers are shared, which may have bugs.
+     */
+    testCoding(false);
     testCoding(false);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4() {
-    prepare(null, 10, 4, null);
-    testCoding(true);
-  }
-
-  @Test
-  public void testCodingDirectBufferWithConf_10x4() {
+  public void testCodingDirectBufferWithConf_10x4_erasing_d0() {
     /**
      * This tests if the two configuration items work or not.
      */
@@ -61,31 +60,62 @@
         RSRawErasureCoderFactory.class.getCanonicalName());
     conf.setBoolean(
         CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_USEXOR_KEY, false);
-    prepare(conf, 10, 4, null);
+
+    prepare(conf, 10, 4, new int[]{0});
+
     testCoding(true);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4_erasure_of_2_4() {
+  public void testCodingDirectBuffer_10x4_erasing_d2() {
+    prepare(null, 10, 4, new int[] {2});
+    testCoding(true);
+    testCoding(true);
+  }
+
+  @Test
+  public void testCodingDirectBuffer_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+    testCoding(true);
+    testCoding(true);
+  }
+
+  @Test
+  public void testCodingBothBuffers_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+
+    /**
+     * Doing in mixed buffer usage model to test if the coders can be repeatedly
+     * reused with different buffer usage model. This matters as the underlying
+     * coding buffers are shared, which may have bugs.
+     */
+    testCoding(true);
+    testCoding(false);
+    testCoding(true);
+    testCoding(false);
+  }
+
+  @Test
+  public void testCodingDirectBuffer_10x4_erasure_of_d2_d4() {
     prepare(null, 10, 4, new int[] {2, 4});
     testCoding(true);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4_erasing_all() {
-    prepare(null, 10, 4, new int[] {0, 1, 2, 3});
+  public void testCodingDirectBuffer_10x4_erasing_d0_d1() {
+    prepare(null, 10, 4, new int[] {0, 1});
     testCoding(true);
   }
 
   @Test
-  public void testCodingNoDirectBuffer_3x3() {
-    prepare(null, 3, 3, null);
+  public void testCodingNoDirectBuffer_3x3_erasing_d0() {
+    prepare(null, 3, 3, new int[] {0});
     testCoding(false);
   }
 
   @Test
-  public void testCodingDirectBuffer_3x3() {
-    prepare(null, 3, 3, null);
+  public void testCodingDirectBuffer_3x3_erasing_d0() {
+    prepare(null, 3, 3, new int[] {0});
     testCoding(true);
   }
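
The new testCodingBothBuffers_* cases alternate direct and heap ByteBuffers against the same coder instances. A self-contained illustration of why that matters, using a toy XOR "coder" (not the patch's RS coder): it must produce identical results regardless of buffer type, run after run, even when buffers are mixed.

import java.nio.ByteBuffer;

public class BufferReuseSketch {
  // XOR all data buffers into the parity buffer; works for direct and heap buffers alike.
  static void xorEncode(ByteBuffer[] data, ByteBuffer parity) {
    int len = parity.remaining();
    for (int i = 0; i < len; i++) {
      byte b = 0;
      for (ByteBuffer d : data) {
        b ^= d.get(d.position() + i);
      }
      parity.put(parity.position() + i, b);
    }
  }

  static ByteBuffer buffer(boolean direct, byte[] bytes) {
    ByteBuffer b = direct ? ByteBuffer.allocateDirect(bytes.length)
                          : ByteBuffer.allocate(bytes.length);
    b.put(bytes);
    b.flip();
    return b;
  }

  public static void main(String[] args) {
    for (boolean direct : new boolean[] {true, false, true, false}) { // mixed usage model
      ByteBuffer[] data = { buffer(direct, new byte[] {1, 2}), buffer(direct, new byte[] {3, 4}) };
      ByteBuffer parity = direct ? ByteBuffer.allocateDirect(2) : ByteBuffer.allocate(2);
      xorEncode(data, parity);
      System.out.println(parity.get(0) + "," + parity.get(1)); // 2,6 on every iteration
    }
  }
}
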

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java
index 109e46e..87aa656 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java
@@ -32,19 +32,33 @@ public class TestXORCoder extends TestErasureCoderBase {
     this.numDataUnits = 10;
     this.numParityUnits = 1;
-    this.erasedDataIndexes = new int[] {0};
-
     this.numChunksInBlock = 10;
   }
 
   @Test
-  public void testCodingNoDirectBuffer() {
+  public void testCodingNoDirectBuffer_erasing_d0() {
+    prepare(null, 10, 1, new int[] {0});
+
+    /**
+     * Doing twice to test if the coders can be repeatedly reused. This matters
+     * as the underlying coding buffers are shared, which may have bugs.
+     */
+    testCoding(false);
     testCoding(false);
   }
 
   @Test
-  public void testCodingDirectBuffer() {
+  public void testCodingBothBuffers_erasing_d5() {
+    prepare(null, 10, 1, new int[]{5});
+
+    /**
+     * Doing in mixed buffer usage model to test if the coders can be repeatedly
+     * reused with different buffer usage model. This matters as the underlying
+     * coding buffers are shared, which may have bugs.
+     */
    testCoding(true);
+    testCoding(false);
+    testCoding(true);
+    testCoding(false);
   }
-
 }
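
TestXORCoder drives a 10+1 layout: a single parity unit that is the XOR of all data units, so any one erased unit can be rebuilt from the survivors. A standalone sketch of that property, with plain byte arrays standing in for ECChunk/TestBlock (this is not the patch's coder implementation, only the math the cases above verify).

public class XorRecoverySketch {
  static byte[] xorOf(byte[][] units, int length) {
    byte[] out = new byte[length];
    for (byte[] unit : units) {
      if (unit == null) continue;                 // skip erased units
      for (int i = 0; i < length; i++) out[i] ^= unit[i];
    }
    return out;
  }

  public static void main(String[] args) {
    int chunkSize = 4, numDataUnits = 10;
    byte[][] data = new byte[numDataUnits][chunkSize];
    for (int u = 0; u < numDataUnits; u++) new java.util.Random(u).nextBytes(data[u]);

    byte[] parity = xorOf(data, chunkSize);       // encode: parity = XOR of all data units

    byte[] lost = data[5].clone();                // back up, then erase d5
    data[5] = null;

    byte[][] survivors = new byte[numDataUnits + 1][];
    System.arraycopy(data, 0, survivors, 0, numDataUnits);
    survivors[numDataUnits] = parity;
    byte[] recovered = xorOf(survivors, chunkSize); // decode: XOR parity with surviving data

    System.out.println(java.util.Arrays.equals(lost, recovered)); // true
  }
}
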

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
index 8bb5d0f..9ba3e88 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
@@ -17,26 +17,13 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.apache.hadoop.io.erasurecode.ECChunk;
-import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.nio.ByteBuffer;
-
 /**
- * Test raw Reed-solomon encoding and decoding.
+ * Test raw Reed-solomon coder implemented in Java.
  */
-public class TestRSRawCoder extends TestRawCoderBase {
-
-  private static int symbolSize = 0;
-  private static int symbolMax = 0;
-
-  static {
-    symbolSize = (int) Math.round(Math.log(
-        RSUtil.GF.getFieldSize()) / Math.log(2));
-    symbolMax = (int) Math.pow(2, symbolSize);
-  }
+public class TestRSRawCoder extends TestRSRawCoderBase {
 
   @Before
   public void setup() {
@@ -45,49 +32,66 @@ public class TestRSRawCoder extends TestRawCoderBase {
   }
 
   @Test
-  public void testCodingNoDirectBuffer_10x4() {
-    prepare(null, 10, 4, null);
+  public void testCodingNoDirectBuffer_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+    /**
+     * Doing twice to test if the coders can be repeatedly reused. This matters
+     * as the underlying coding buffers are shared, which may have bugs.
+     */
+    testCoding(false);
     testCoding(false);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4() {
-    prepare(null, 10, 4, null);
+  public void testCodingDirectBuffer_10x4_erasing_d2() {
+    prepare(null, 10, 4, new int[] {2});
+    testCoding(true);
     testCoding(true);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4_erasure_of_2_4() {
-    prepare(null, 10, 4, new int[] {2, 4});
+  public void testCodingDirectBuffer_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+    testCoding(true);
    testCoding(true);
   }
 
   @Test
-  public void testCodingDirectBuffer_10x4_erasing_all() {
-    prepare(null, 10, 4, new int[] {0, 1, 2, 3});
+  public void testCodingBothBuffers_10x4_erasing_d0() {
+    prepare(null, 10, 4, new int[] {0});
+
+    /**
+     * Doing in mixed buffer usage model to test if the coders can be repeatedly
+     * reused with different buffer usage model. This matters as the underlying
+     * coding buffers are shared, which may have bugs.
+     */
+    testCoding(true);
+    testCoding(false);
     testCoding(true);
+    testCoding(false);
   }
 
   @Test
-  public void testCodingNoDirectBuffer_3x3() {
-    prepare(null, 3, 3, null);
-    testCoding(false);
+  public void testCodingDirectBuffer_10x4_erasure_of_d2_d4() {
+    prepare(null, 10, 4, new int[] {2, 4});
+    testCoding(true);
   }
 
   @Test
-  public void testCodingDirectBuffer_3x3() {
-    prepare(null, 3, 3, null);
+  public void testCodingDirectBuffer_10x4_erasing_d0_d1() {
+    prepare(null, 10, 4, new int[] {0, 1});
     testCoding(true);
   }
 
-  @Override
-  protected ECChunk generateDataChunk() {
-    ByteBuffer buffer = allocateOutputBuffer();
-    for (int i = 0; i < chunkSize; i++) {
-      buffer.put((byte) RAND.nextInt(symbolMax));
-    }
-    buffer.flip();
+  @Test
+  public void testCodingNoDirectBuffer_3x3_erasing_d0() {
+    prepare(null, 3, 3, new int[] {0});
+    testCoding(false);
+  }
 
-    return new ECChunk(buffer);
+  @Test
+  public void testCodingDirectBuffer_3x3_erasing_d0() {
+    prepare(null, 3, 3, new int[] {0});
+    testCoding(true);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java
new file mode 100644
index 0000000..f9e8a6b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.io.erasurecode.ECChunk;
+import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Test base for raw Reed-solomon coders.
+ */
+public abstract class TestRSRawCoderBase extends TestRawCoderBase {
+
+  private static int symbolSize = 0;
+  private static int symbolMax = 0;
+
+  static {
+    symbolSize = (int) Math.round(Math.log(
+        RSUtil.GF.getFieldSize()) / Math.log(2));
+    symbolMax = (int) Math.pow(2, symbolSize);
+  }
+
+  @Override
+  protected ECChunk generateDataChunk() {
+    ByteBuffer buffer = allocateOutputBuffer();
+    for (int i = 0; i < chunkSize; i++) {
+      buffer.put((byte) RAND.nextInt(symbolMax));
+    }
+    buffer.flip();
+
+    return new ECChunk(buffer);
+  }
+}
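
TestRSRawCoderBase above constrains generated test bytes to valid Reed-Solomon symbols: symbolSize is log2 of the Galois-field size and symbolMax bounds the random values. A small sketch of that computation; the field size of 256 is an assumption standing in for RSUtil.GF.getFieldSize() on this branch.

public class RsSymbolRangeSketch {
  public static void main(String[] args) {
    int fieldSize = 256;                                                  // assumed GF(2^8)
    int symbolSize = (int) Math.round(Math.log(fieldSize) / Math.log(2)); // 8 bits per symbol
    int symbolMax = (int) Math.pow(2, symbolSize);                        // 256

    java.util.Random rand = new java.util.Random();
    byte symbol = (byte) rand.nextInt(symbolMax);                         // a valid RS symbol
    System.out.println(symbolSize + " " + symbolMax + " " + symbol);
  }
}
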

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
index b036eed..7ba320a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
@@ -26,6 +26,8 @@ import org.apache.hadoop.io.erasurecode.TestCoderBase;
 public abstract class TestRawCoderBase extends TestCoderBase {
   protected Class encoderClass;
   protected Class decoderClass;
+  private RawErasureEncoder encoder;
+  private RawErasureDecoder decoder;
 
   /**
    * Generating source data, encoding, recovering and then verifying.
@@ -37,40 +39,41 @@
    */
   protected void testCoding(boolean usingDirectBuffer) {
     this.usingDirectBuffer = usingDirectBuffer;
+    prepareCoders();
 
     // Generate data and encode
     ECChunk[] dataChunks = prepareDataChunksForEncoding();
     ECChunk[] parityChunks = prepareParityChunksForEncoding();
-    RawErasureEncoder encoder = createEncoder();
 
     // Backup all the source chunks for later recovering because some coders
     // may affect the source data.
     ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks);
-    // Make a copy of a strip for later comparing
-    ECChunk[] toEraseDataChunks = copyDataChunksToErase(clonedDataChunks);
 
-    try {
-      encoder.encode(dataChunks, parityChunks);
-    } finally {
-      encoder.release();
-    }
-    // Erase the copied sources
-    eraseSomeDataBlocks(clonedDataChunks);
+    encoder.encode(dataChunks, parityChunks);
+
+    // Backup and erase some chunks
+    ECChunk[] backupChunks = backupAndEraseChunks(clonedDataChunks);
+
+    // Decode
+    ECChunk[] inputChunks = prepareInputChunksForDecoding(
+        clonedDataChunks, parityChunks);
 
-    //Decode
-    ECChunk[] inputChunks = prepareInputChunksForDecoding(clonedDataChunks,
-        parityChunks);
     ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
-    RawErasureDecoder decoder = createDecoder();
-    try {
-      decoder.decode(inputChunks,
-          getErasedIndexesForDecoding(), recoveredChunks);
-    } finally {
-      decoder.release();
+
+    decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
+
+    // Compare
+    compareAndVerify(backupChunks, recoveredChunks);
+  }
+
+  private void prepareCoders() {
+    if (encoder == null) {
+      encoder = createEncoder();
     }
-    //Compare
-    compareAndVerify(toEraseDataChunks, recoveredChunks);
+    if (decoder == null) {
+      decoder = createDecoder();
+    }
   }
 
   /**


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f7eb463/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
index e66e48d..62ce4fb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
@@ -32,18 +32,32 @@ public class TestXORRawCoder extends TestRawCoderBase {
     this.numDataUnits = 10;
     this.numParityUnits = 1;
-
-    this.erasedDataIndexes = new int[] {0};
   }
 
   @Test
-  public void testCodingNoDirectBuffer() {
+  public void testCodingNoDirectBuffer_erasing_d0() {
+    prepare(null, 10, 1, new int[] {0});
+
+    /**
+     * Doing twice to test if the coders can be repeatedly reused. This matters
+     * as the underlying coding buffers are shared, which may have bugs.
+     */
+    testCoding(false);
     testCoding(false);
   }
 
   @Test
-  public void testCodingDirectBuffer() {
+  public void testCodingBothBuffers_erasing_d5() {
+    prepare(null, 10, 1, new int[]{5});
+
+    /**
+     * Doing in mixed buffer usage model to test if the coders can be repeatedly
+     * reused with different buffer usage model. This matters as the underlying
+     * coding buffers are shared, which may have bugs.
+     */
     testCoding(true);
+    testCoding(false);
+    testCoding(true);
+    testCoding(false);
   }
-
 }
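
Finally, a standalone sketch of the decode-input convention the raw-coder round trip above relies on: inputs are assembled as parity chunks followed by data chunks, and erased data indexes are shifted by numParityUnits (see getErasedIndexesForDecoding() earlier in this patch). Plain byte arrays stand in for ECChunk, erased entries are shown as null purely for illustration, and the actual decoder call is elided.

public class DecodeInputLayoutSketch {
  static byte[][] prepareInputs(byte[][] dataChunks, byte[][] parityChunks) {
    byte[][] inputs = new byte[parityChunks.length + dataChunks.length][];
    int idx = 0;
    for (byte[] p : parityChunks) inputs[idx++] = p;  // parity first
    for (byte[] d : dataChunks) inputs[idx++] = d;    // then data, erased entries null here
    return inputs;
  }

  public static void main(String[] args) {
    byte[][] data = { {1}, null, {3} };   // data unit 1 erased
    byte[][] parity = { {9} };
    byte[][] inputs = prepareInputs(data, parity);
    // 4 inputs in total; the erased data unit 1 maps to input index 1 + numParityUnits = 2
    System.out.println(inputs.length + ", erased input index = " + (1 + parity.length));
  }
}
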