From: arp@apache.org
To: common-commits@hadoop.apache.org
Date: Thu, 25 Sep 2014 03:09:30 -0000
Subject: [11/18] git commit: HADOOP-11064. UnsatisifedLinkError with hadoop 2.4 JARs on hadoop-2.6 due to NativeCRC32 method changes. Contributed by Chris Nauroth.

HADOOP-11064. UnsatisifedLinkError with hadoop 2.4 JARs on hadoop-2.6 due to
NativeCRC32 method changes. Contributed by Chris Nauroth.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cbf0ae74
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cbf0ae74
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cbf0ae74

Branch: refs/heads/HDFS-6581
Commit: cbf0ae742ae3db964550df11c4044d3e16013959
Parents: 39c8734
Author: cnauroth
Authored: Wed Sep 24 15:30:13 2014 -0700
Committer: cnauroth
Committed: Wed Sep 24 15:30:13 2014 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../org/apache/hadoop/util/NativeCrc32.java     |  33 +++
 .../src/org/apache/hadoop/util/NativeCrc32.c    |  12 +
 .../org/apache/hadoop/util/TestNativeCrc32.java | 229 +++++++++++++++++++
 4 files changed, 277 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbf0ae74/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 5f4ae1a..b2a119f 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -856,6 +856,9 @@ Release 2.6.0 - UNRELEASED
     wrong value if excluded nodes passed are not part of the cluster tree
     (vinayakumarb)
 
+    HADOOP-11064. UnsatisifedLinkError with hadoop 2.4 JARs on hadoop-2.6 due to
+    NativeCRC32 method changes. (cnauroth)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES
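
For context on the failure mode in the JIRA title: the JVM binds a Java
native method to a JNI export whose name is derived from the fully qualified
class and method name, and the lookup happens lazily, at the first
invocation. A hadoop-2.4 JAR on a hadoop-2.6 cluster therefore loads
cleanly and only fails the first time it calls a native method that 2.6 had
removed. The minimal sketch below (illustrative only; the real class loads
libhadoop indirectly via NativeCodeLoader rather than loadLibrary) shows the
binding contract that this patch keeps satisfiable:

    package org.apache.hadoop.util;

    import java.nio.ByteBuffer;

    // Stand-in for the 2.4-era class (a sketch, not the real source). The
    // JVM maps this declaration to the exported JNI symbol
    //   Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunkedSums
    // and resolves it lazily, at the first invocation. If the loaded
    // libhadoop no longer exports that symbol, the first call throws
    // UnsatisfiedLinkError even though class loading and library loading
    // both succeeded long before.
    class NativeCrc32 {
      static {
        // Assumption for the sketch: libhadoop is on java.library.path.
        System.loadLibrary("hadoop");
      }

      static native void nativeVerifyChunkedSums(
          int bytesPerSum, int checksumType,
          ByteBuffer sums, int sumsOffset,
          ByteBuffer data, int dataOffset, int dataLength,
          String fileName, long basePos);
    }

Re-adding the JNI function, as the NativeCrc32.c change below does, keeps
this old binding resolvable while new code uses nativeComputeChunkedSums.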

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbf0ae74/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
index 0807d2c..cacf006 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
@@ -21,6 +21,8 @@ import java.nio.ByteBuffer;
 
 import org.apache.hadoop.fs.ChecksumException;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Wrapper around JNI support code to do checksum computation
  * natively.
@@ -86,6 +88,37 @@ class NativeCrc32 {
         data, dataOffset, dataLength, "", 0, false);
   }
 
+  /**
+   * Verify the given buffers of data and checksums, and throw an exception
+   * if any checksum is invalid. The buffers given to this function should
+   * have their position initially at the start of the data, and their limit
+   * set at the end of the data. The position, limit, and mark are not
+   * modified. This method is retained only for backwards-compatibility with
+   * prior jar versions that need the corresponding JNI function.
+   *
+   * @param bytesPerSum the chunk size (eg 512 bytes)
+   * @param checksumType the DataChecksum type constant
+   * @param sums the DirectByteBuffer pointing at the beginning of the
+   *             stored checksums
+   * @param sumsOffset start offset in sums buffer
+   * @param data the DirectByteBuffer pointing at the beginning of the
+   *             data to check
+   * @param dataOffset start offset in data buffer
+   * @param dataLength length of data buffer
+   * @param fileName the name of the file being verified
+   * @param basePos the position in the file where the data buffer starts
+   * @throws ChecksumException if there is an invalid checksum
+   * @deprecated use {@link #nativeComputeChunkedSums(int, int, ByteBuffer, int,
+   *   ByteBuffer, int, int, String, long, boolean)} instead
+   */
+  @Deprecated
+  @VisibleForTesting
+  static native void nativeVerifyChunkedSums(
+      int bytesPerSum, int checksumType,
+      ByteBuffer sums, int sumsOffset,
+      ByteBuffer data, int dataOffset, int dataLength,
+      String fileName, long basePos) throws ChecksumException;
 
   private static native void nativeComputeChunkedSums(
       int bytesPerSum, int checksumType,
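
The javadoc above pins down the buffer contract for the restored entry
point: position at the start of the data, limit at the end, and none of
position, limit, or mark modified by the call. A hypothetical in-package
caller might look like the sketch below (NativeCrc32 is package-private,
and ordinary callers go through DataChecksum or the non-deprecated
wrappers; NativeVerifyExample is an invented name for illustration):

    package org.apache.hadoop.util;

    import java.nio.ByteBuffer;
    import org.apache.hadoop.fs.ChecksumException;

    // Illustrative caller of the restored, deprecated entry point. Both
    // buffers are assumed to be flipped, so position = 0 and limit = end
    // of content, matching the documented contract; the native side
    // leaves position, limit, and mark untouched.
    class NativeVerifyExample {
      @SuppressWarnings("deprecation")
      static void verify(ByteBuffer data, ByteBuffer sums, int bytesPerSum,
          int checksumTypeId, String fileName) throws ChecksumException {
        NativeCrc32.nativeVerifyChunkedSums(bytesPerSum, checksumTypeId,
            sums, sums.position(), data, data.position(), data.remaining(),
            fileName, 0L);
      }
    }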

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbf0ae74/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
index 899c59f..74e09e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
@@ -181,6 +181,18 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChun
   }
 }
 
+JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunkedSums
+  (JNIEnv *env, jclass clazz,
+    jint bytes_per_checksum, jint j_crc_type,
+    jobject j_sums, jint sums_offset,
+    jobject j_data, jint data_offset, jint data_len,
+    jstring j_filename, jlong base_pos)
+{
+  Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSums(env, clazz,
+    bytes_per_checksum, j_crc_type, j_sums, sums_offset, j_data, data_offset,
+    data_len, j_filename, base_pos, JNI_TRUE);
+}
+
 JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSumsByteArray
   (JNIEnv *env, jclass clazz,
     jint bytes_per_checksum, jint j_crc_type,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbf0ae74/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCrc32.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCrc32.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCrc32.java
new file mode 100644
index 0000000..aecdc8f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCrc32.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import static org.junit.Assert.*;
+import static org.junit.Assume.*;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ChecksumException;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class TestNativeCrc32 {
+
+  private static final long BASE_POSITION = 0;
+  private static final int IO_BYTES_PER_CHECKSUM_DEFAULT = 512;
+  private static final String IO_BYTES_PER_CHECKSUM_KEY =
+    "io.bytes.per.checksum";
+  private static final int NUM_CHUNKS = 3;
+
+  private final DataChecksum.Type checksumType;
+
+  private int bytesPerChecksum;
+  private String fileName;
+  private ByteBuffer data, checksums;
+  private DataChecksum checksum;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Parameters
+  public static Collection<Object[]> data() {
+    Collection<Object[]> params = new ArrayList<Object[]>(2);
+    params.add(new Object[] { DataChecksum.Type.CRC32 });
+    params.add(new Object[] { DataChecksum.Type.CRC32C });
+    return params;
+  }
+
+  public TestNativeCrc32(DataChecksum.Type checksumType) {
+    this.checksumType = checksumType;
+  }
+
+  @Before
+  public void setup() {
+    assumeTrue(NativeCrc32.isAvailable());
+    assertEquals(
+      "These tests assume they can write a checksum value as a 4-byte int.", 4,
+      checksumType.size);
+    Configuration conf = new Configuration();
+    bytesPerChecksum = conf.getInt(IO_BYTES_PER_CHECKSUM_KEY,
+      IO_BYTES_PER_CHECKSUM_DEFAULT);
+    fileName = this.getClass().getSimpleName();
+    checksum = DataChecksum.newDataChecksum(checksumType, bytesPerChecksum);
+  }
+
+  @Test
+  public void testVerifyChunkedSumsSuccess() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndValidChecksums();
+    NativeCrc32.verifyChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, data, fileName, BASE_POSITION);
+  }
+
+  @Test
+  public void testVerifyChunkedSumsFail() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndInvalidChecksums();
+    exception.expect(ChecksumException.class);
+    NativeCrc32.verifyChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, data, fileName, BASE_POSITION);
+  }
+
+  @Test
+  public void testVerifyChunkedSumsByteArraySuccess() throws ChecksumException {
+    allocateArrayByteBuffers();
+    fillDataAndValidChecksums();
+    NativeCrc32.verifyChunkedSumsByteArray(bytesPerChecksum, checksumType.id,
+      checksums.array(), checksums.position(), data.array(), data.position(),
+      data.remaining(), fileName, BASE_POSITION);
+  }
+
+  @Test
+  public void testVerifyChunkedSumsByteArrayFail() throws ChecksumException {
+    allocateArrayByteBuffers();
+    fillDataAndInvalidChecksums();
+    exception.expect(ChecksumException.class);
+    NativeCrc32.verifyChunkedSumsByteArray(bytesPerChecksum, checksumType.id,
+      checksums.array(), checksums.position(), data.array(), data.position(),
+      data.remaining(), fileName, BASE_POSITION);
+  }
+
+  @Test
+  public void testCalculateChunkedSumsSuccess() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndValidChecksums();
+    NativeCrc32.calculateChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, data);
+  }
+
+  @Test
+  public void testCalculateChunkedSumsFail() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndInvalidChecksums();
+    NativeCrc32.calculateChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, data);
+  }
+
+  @Test
+  public void testCalculateChunkedSumsByteArraySuccess() throws ChecksumException {
+    allocateArrayByteBuffers();
+    fillDataAndValidChecksums();
+    NativeCrc32.calculateChunkedSumsByteArray(bytesPerChecksum, checksumType.id,
+      checksums.array(), checksums.position(), data.array(), data.position(),
+      data.remaining());
+  }
+
+  @Test
+  public void testCalculateChunkedSumsByteArrayFail() throws ChecksumException {
+    allocateArrayByteBuffers();
+    fillDataAndInvalidChecksums();
+    NativeCrc32.calculateChunkedSumsByteArray(bytesPerChecksum, checksumType.id,
+      checksums.array(), checksums.position(), data.array(), data.position(),
+      data.remaining());
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testNativeVerifyChunkedSumsSuccess() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndValidChecksums();
+    NativeCrc32.nativeVerifyChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, checksums.position(), data, data.position(), data.remaining(),
+      fileName, BASE_POSITION);
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testNativeVerifyChunkedSumsFail() throws ChecksumException {
+    allocateDirectByteBuffers();
+    fillDataAndInvalidChecksums();
+    exception.expect(ChecksumException.class);
+    NativeCrc32.nativeVerifyChunkedSums(bytesPerChecksum, checksumType.id,
+      checksums, checksums.position(), data, data.position(), data.remaining(),
+      fileName, BASE_POSITION);
+  }
+
+  /**
+   * Allocates data buffer and checksums buffer as arrays on the heap.
+   */
+  private void allocateArrayByteBuffers() {
+    data = ByteBuffer.wrap(new byte[bytesPerChecksum * NUM_CHUNKS]);
+    checksums = ByteBuffer.wrap(new byte[NUM_CHUNKS * checksumType.size]);
+  }
+
+  /**
+   * Allocates data buffer and checksums buffer as direct byte buffers.
+   */
+  private void allocateDirectByteBuffers() {
+    data = ByteBuffer.allocateDirect(bytesPerChecksum * NUM_CHUNKS);
+    checksums = ByteBuffer.allocateDirect(NUM_CHUNKS * checksumType.size);
+  }
+
+  /**
+   * Fill data buffer with monotonically increasing byte values.  Overflow is
+   * fine, because it's just test data.  Update the checksum with the same byte
+   * values.  After every chunk, write the checksum to the checksums buffer.
+   * When finished writing, flip the buffers to prepare them for reading.
+   */
+  private void fillDataAndValidChecksums() {
+    for (int i = 0; i < NUM_CHUNKS; ++i) {
+      for (int j = 0; j < bytesPerChecksum; ++j) {
+        byte b = (byte)((i * bytesPerChecksum + j) & 0xFF);
+        data.put(b);
+        checksum.update(b);
+      }
+      checksums.putInt((int)checksum.getValue());
+      checksum.reset();
+    }
+    data.flip();
+    checksums.flip();
+  }
+
+  /**
+   * Fill data buffer with monotonically increasing byte values.  Overflow is
+   * fine, because it's just test data.  Update the checksum with different
+   * byte values, so that the checksums are intentionally incorrect.  After
+   * every chunk, write the checksum to the checksums buffer.  When finished
+   * writing, flip the buffers to prepare them for reading.
+   */
+  private void fillDataAndInvalidChecksums() {
+    for (int i = 0; i < NUM_CHUNKS; ++i) {
+      for (int j = 0; j < bytesPerChecksum; ++j) {
+        byte b = (byte)((i * bytesPerChecksum + j) & 0xFF);
+        data.put(b);
+        checksum.update((byte)(b + 1));
+      }
+      checksums.putInt((int)checksum.getValue());
+      checksum.reset();
+    }
+    data.flip();
+    checksums.flip();
+  }
+}
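
For readers who want the verification semantics these tests exercise
without going through JNI, here is a pure-Java sketch (not Hadoop's
implementation) of the same chunked-CRC contract: the data is cut into
bytesPerSum-sized chunks and each chunk's CRC is compared against a stored
4-byte checksum. Unlike the native code, this sketch consumes the buffer
positions, uses java.util.zip.CRC32 (so it corresponds to
DataChecksum.Type.CRC32 only), and throws IllegalStateException where
Hadoop throws ChecksumException; ChunkedCrcSketch is an invented name.

    import java.nio.ByteBuffer;
    import java.util.zip.CRC32;

    // Pure-Java sketch of chunked checksum verification (illustrative
    // only; Hadoop's real implementation is the native code patched
    // above).
    public class ChunkedCrcSketch {
      static void verifyChunkedSums(int bytesPerSum, ByteBuffer sums,
          ByteBuffer data, String fileName, long basePos) {
        CRC32 crc = new CRC32();
        byte[] chunk = new byte[bytesPerSum];
        long chunkStart = basePos;
        while (data.hasRemaining()) {
          int n = Math.min(bytesPerSum, data.remaining());
          data.get(chunk, 0, n);      // consumes position, unlike the JNI code
          crc.reset();
          crc.update(chunk, 0, n);
          int stored = sums.getInt(); // checksums are stored as 4-byte ints
          if ((int) crc.getValue() != stored) {
            throw new IllegalStateException("Checksum error in " + fileName
                + " at position " + chunkStart);
          }
          chunkStart += n;
        }
      }

      public static void main(String[] args) {
        // Build one valid chunk the same way the test's
        // fillDataAndValidChecksums does, then verify it.
        int bytesPerSum = 512;
        ByteBuffer data = ByteBuffer.allocate(bytesPerSum);
        CRC32 crc = new CRC32();
        for (int j = 0; j < bytesPerSum; ++j) {
          byte b = (byte) (j & 0xFF);
          data.put(b);
          crc.update(b);
        }
        ByteBuffer sums = ByteBuffer.allocate(4).putInt((int) crc.getValue());
        data.flip();
        sums.flip();
        verifyChunkedSums(bytesPerSum, sums, data, "sketch", 0L); // passes
      }
    }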