From: rangadi@apache.org
To: core-commits@hadoop.apache.org
Reply-To: core-dev@hadoop.apache.org
Subject: svn commit: r699522 - in /hadoop/core/branches/branch-0.17: CHANGES.txt src/java/org/apache/hadoop/fs/FSInputChecker.java src/test/org/apache/hadoop/dfs/TestFSInputChecker.java
Date: Fri, 26 Sep 2008 22:15:55 -0000
Message-Id: <20080926221556.54E2823889C4@eris.apache.org>

Author: rangadi
Date: Fri Sep 26 15:15:54 2008
New Revision: 699522

URL: http://svn.apache.org/viewvc?rev=699522&view=rev
Log:
HADOOP-4217. Checksum input stream can sometimes return invalid data to the
user. (Ning Li via rangadi)

Modified:
    hadoop/core/branches/branch-0.17/CHANGES.txt
    hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java
    hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java

Modified: hadoop/core/branches/branch-0.17/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/CHANGES.txt?rev=699522&r1=699521&r2=699522&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.17/CHANGES.txt Fri Sep 26 15:15:54 2008
@@ -7,6 +7,9 @@
     HADOOP-4277. Checksum verification was mistakenly disabled for
     LocalFileSystem. (Raghu Angadi)
 
+    HADOOP-4217. Checksum input stream can sometimes return invalid
+    data to the user. (Ning Li via rangadi)
+
 Release 0.17.2 - 2008-08-11
 
 BUG FIXES

Modified: hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java?rev=699522&r1=699521&r2=699522&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java (original)
+++ hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java Fri Sep 26 15:15:54 2008
@@ -174,7 +174,6 @@
   private void fill( ) throws IOException {
     assert(pos>=count);
     // fill internal buffer
-    pos = 0;
     count = readChecksumChunk(buf, 0, buf.length);
   }
 
@@ -226,6 +225,9 @@
    */
   private int readChecksumChunk(byte b[], int off, int len)
   throws IOException {
+    // invalidate buffer
+    count = pos = 0;
+
     int read = 0;
     boolean retry = true;
     int retriesLeft = numOfRetries;
@@ -248,9 +250,6 @@
           throw ce;
         }
 
-        // invalidate buffer
-        count = pos = 0;
-
         // try a new replica
         if (seekToNewSource(chunkPos)) {
           // Since at least one of the sources is different,
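The essence of the FSInputChecker change above is ordering: readChecksumChunk() now clears pos and count before it attempts to refill the buffer, so a ChecksumException that escapes the retry loop (or a seek to a new replica) can no longer leave stale bytes looking valid. In the old code, fill() reset pos but count was only cleared inside the retry path, so a failed refill left pos=0 with the previous count, and a later read could hand old buffer contents back to the caller. Below is a minimal standalone sketch of the corrected pattern; the class and method names (ToyChecker, readChunk) are hypothetical, not the real FSInputChecker API.

// Sketch only: a toy buffered checksum reader illustrating why the buffer
// is invalidated *before* the refill attempt. ToyChecker and readChunk are
// hypothetical names, not part of the Hadoop API.
import java.io.IOException;

abstract class ToyChecker {
  private final byte[] buf = new byte[512];
  private int pos;    // next byte to return to the caller
  private int count;  // number of valid bytes currently in buf

  /** Reads the next chunk into b; throws on a checksum mismatch. */
  protected abstract int readChunk(byte[] b) throws IOException;

  private void fill() throws IOException {
    // Invalidate first: if readChunk() throws, pos and count no longer
    // describe the stale contents of buf, so a later read cannot hand
    // those bytes back to the caller.
    count = pos = 0;
    count = readChunk(buf);
  }

  public int read() throws IOException {
    if (pos >= count) {
      fill();
      if (count <= 0) {
        return -1; // end of stream
      }
    }
    return buf[pos++] & 0xff;
  }
}
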
Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java?rev=699522&r1=699521&r2=699522&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java Fri Sep 26 15:15:54 2008
@@ -292,6 +292,7 @@
     try {
       testChecker(fileSys, true);
       testChecker(fileSys, false);
+      testSeekAndRead(fileSys);
     } finally {
       fileSys.close();
       cluster.shutdown();
@@ -304,8 +305,41 @@
       testChecker(fileSys, true);
       testChecker(fileSys, false);
       testFileCorruption((LocalFileSystem)fileSys);
+      testSeekAndRead(fileSys);
     }finally {
       fileSys.close();
     }
   }
+
+  private void testSeekAndRead(ChecksumFileSystem fileSys)
+  throws IOException {
+    Path file = new Path("try.dat");
+    writeFile(fileSys, file);
+    stm = fileSys.open(file,
+        fileSys.getConf().getInt("io.file.buffer.size", 4096));
+    checkSeekAndRead();
+    stm.close();
+    cleanupFile(fileSys, file);
+  }
+
+  private void checkSeekAndRead() throws IOException {
+    int position = 1;
+    int len = 2 * BYTES_PER_SUM - (int) position;
+    readAndCompare(stm, position, len);
+
+    position = BYTES_PER_SUM;
+    len = BYTES_PER_SUM;
+    readAndCompare(stm, position, len);
+  }
+
+  private void readAndCompare(FSDataInputStream in, int position, int len)
+      throws IOException {
+    byte[] b = new byte[len];
+    in.seek(position);
+    IOUtils.readFully(in, b, 0, b.length);
+
+    for (int i = 0; i < b.length; i++) {
+      assertEquals(expected[position + i], b[i]);
+    }
+  }
 }
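The new testSeekAndRead() exercises the seek-then-read path that used to return bad data: seek into the middle of a checksum chunk, read across the chunk boundary, and compare every byte with the expected file contents. The same pattern can be tried standalone against the local checksummed filesystem; the sketch below is illustrative only, and the file name, data pattern, and the 512-byte value assumed for io.bytes.per.checksum are assumptions, not the committed test code.

// Sketch only: reproduces the seek-then-read pattern from the new test
// against the local (checksummed) filesystem. Path, data pattern and
// BYTES_PER_SUM (default io.bytes.per.checksum of 512) are assumptions.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class SeekAndReadSketch {
  private static final int BYTES_PER_SUM = 512;

  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path file = new Path("try.dat");

    // Write a few checksum chunks of known data through the local
    // checksummed filesystem (this also creates the .crc side file).
    byte[] expected = new byte[4 * BYTES_PER_SUM];
    for (int i = 0; i < expected.length; i++) {
      expected[i] = (byte) (i % 251);
    }
    FSDataOutputStream out = fs.create(file);
    out.write(expected);
    out.close();

    FSDataInputStream in = fs.open(file);
    try {
      // Seek into the first chunk, then read across the chunk boundary.
      readAndCompare(in, expected, 1, 2 * BYTES_PER_SUM - 1);
      // Seek exactly to a chunk boundary and read one full chunk.
      readAndCompare(in, expected, BYTES_PER_SUM, BYTES_PER_SUM);
    } finally {
      in.close();
      fs.delete(file, false);
    }
  }

  private static void readAndCompare(FSDataInputStream in, byte[] expected,
      int position, int len) throws IOException {
    byte[] b = new byte[len];
    in.seek(position);
    IOUtils.readFully(in, b, 0, b.length);
    for (int i = 0; i < b.length; i++) {
      if (expected[position + i] != b[i]) {
        throw new AssertionError("mismatch at offset " + (position + i));
      }
    }
  }
}

Run with the Hadoop core jar on the classpath; a mismatch at any offset fails with an AssertionError naming the offset, while a clean run exits silently.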