Subject: svn commit: r723469 - in /hadoop/core/branches/branch-0.19: ./ src/core/org/apache/hadoop/fs/ src/hdfs/org/apache/hadoop/hdfs/ src/test/org/apache/hadoop/hdfs/
Date: Thu, 04 Dec 2008 22:20:41 -0000
To: core-commits@hadoop.apache.org
From: szetszwo@apache.org

Author: szetszwo
Date: Thu Dec 4 14:20:41 2008
New Revision: 723469

URL: http://svn.apache.org/viewvc?rev=723469&view=rev
Log:
HADOOP-4508. Fix FSDataOutputStream.getPos() for append. (dhruba via szetszwo)

Modified:
    hadoop/core/branches/branch-0.19/CHANGES.txt
    hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FSDataOutputStream.java
    hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java

Modified: hadoop/core/branches/branch-0.19/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/CHANGES.txt?rev=723469&r1=723468&r2=723469&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.19/CHANGES.txt Thu Dec 4 14:20:41 2008
@@ -18,6 +18,9 @@
     HADOOP-4632. Fix TestJobHistoryVersion to use test.build.dir instead of the
     current workding directory for scratch space. (Amar Kamat via cdouglas)
 
+    HADOOP-4508. Fix FSDataOutputStream.getPos() for append. (dhruba via
+    szetszwo)
+
 Release 0.19.0 - 2008-11-18
 
   INCOMPATIBLE CHANGES
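For context, the user-visible contract that HADOOP-4508 establishes is that a stream returned by FileSystem.append() reports getPos() starting from the existing file length rather than from zero. The standalone sketch below illustrates that contract; it is not part of this commit, and it assumes a reachable HDFS with appends enabled (dfs.support.append) and a pre-existing file at the made-up path /tmp/append-demo.txt.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AppendGetPosDemo {
  public static void main(String[] args) throws Exception {
    // Assumes fs.default.name points at a running HDFS and that the target
    // file already exists; the path below is hypothetical.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path file = new Path("/tmp/append-demo.txt");

    long lenBefore = fs.getFileStatus(file).getLen();
    FSDataOutputStream out = fs.append(file);
    // With this fix applied, getPos() resumes at the existing file length
    // instead of restarting at 0.
    System.out.println("getPos()=" + out.getPos() + " expected=" + lenBefore);
    out.close();
    fs.close();
  }
}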
Modified: hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FSDataOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FSDataOutputStream.java?rev=723469&r1=723468&r2=723469&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FSDataOutputStream.java (original)
+++ hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FSDataOutputStream.java Thu Dec 4 14:20:41 2008
@@ -30,9 +30,11 @@
     long position;
 
     public PositionCache(OutputStream out, 
-                         FileSystem.Statistics stats) throws IOException {
+                         FileSystem.Statistics stats,
+                         long pos) throws IOException {
       super(out);
       statistics = stats;
+      position = pos;
     }
 
     public void write(int b) throws IOException {
@@ -67,7 +69,12 @@
 
   public FSDataOutputStream(OutputStream out, FileSystem.Statistics stats)
     throws IOException {
-    super(new PositionCache(out, stats));
+    this(out, stats, 0);
+  }
+
+  public FSDataOutputStream(OutputStream out, FileSystem.Statistics stats,
+      long startPosition) throws IOException {
+    super(new PositionCache(out, stats, startPosition));
     wrappedStream = out;
   }
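The essence of the FSDataOutputStream change is that the internal PositionCache (and the new constructor) can be seeded with a starting offset instead of always counting from zero. The fragment below is a simplified, standalone illustration of that pattern using plain java.io classes; the class name is invented and it omits the FileSystem.Statistics bookkeeping that the real PositionCache carries.

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

// Standalone sketch of the idea: the wrapper is seeded with a starting offset
// (e.g. the length of the file being appended to) and advances it on every
// write, so getPos() reflects the absolute position in the file.
class SeededPositionStream extends FilterOutputStream {
  private long position;

  SeededPositionStream(OutputStream out, long startPosition) {
    super(out);
    position = startPosition;
  }

  @Override
  public void write(int b) throws IOException {
    out.write(b);
    position++;
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    out.write(b, off, len);
    position += len;
  }

  long getPos() {
    return position;
  }

  public static void main(String[] args) throws IOException {
    // Pretend the underlying file already held 1024 bytes before the append.
    SeededPositionStream s = new SeededPositionStream(new ByteArrayOutputStream(), 1024);
    s.write(new byte[16], 0, 16);
    System.out.println(s.getPos()); // prints 1040
  }
}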
Modified: hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=723469&r1=723468&r2=723469&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Thu Dec 4 14:20:41 2008
@@ -2024,6 +2024,7 @@
     private int recoveryErrorCount = 0; // number of times block recovery failed
     private int maxRecoveryErrorCount = 5; // try block recovery 5 times
     private volatile boolean appendChunk = false; // appending to existing partial block
+    private long initialFileSize = 0; // at time of file open
 
     private void setLastException(IOException e) {
       if (lastException == null) {
@@ -2600,6 +2601,7 @@
                         LocatedBlock lastBlock, FileStatus stat,
                         int bytesPerChecksum) throws IOException {
       this(src, stat.getBlockSize(), progress, bytesPerChecksum);
+      initialFileSize = stat.getLen(); // length of file when opened
 
       //
       // The last partial block of the file has to be filled.
@@ -3155,6 +3157,13 @@
     synchronized void setTestFilename(String newname) {
       src = newname;
     }
+
+    /**
+     * Returns the size of a file as it was when this stream was opened
+     */
+    long getInitialLen() {
+      return initialFileSize;
+    }
   }
 
   void reportChecksumFailure(String file, Block blk, DatanodeInfo dn) {

Modified: hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=723469&r1=723468&r2=723469&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/core/branches/branch-0.19/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java Thu Dec 4 14:20:41 2008
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream;
 
 import org.apache.hadoop.util.*;
 
@@ -157,8 +158,8 @@
 
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
-    return new FSDataOutputStream(
-        dfs.append(getPathName(f), bufferSize, progress), statistics);
+    DFSOutputStream op = (DFSOutputStream)dfs.append(getPathName(f), bufferSize, progress);
+    return new FSDataOutputStream(op, statistics, op.getInitialLen());
   }
 
   public FSDataOutputStream create(Path f, FsPermission permission,

Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java?rev=723469&r1=723468&r2=723469&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java Thu Dec 4 14:20:41 2008
@@ -156,6 +156,10 @@
 
       // write the remainder of the file
       stm = fs.append(file1);
+
+      // ensure getPos is set to reflect existing size of the file
+      assertTrue(stm.getPos() > 0);
+
       System.out.println("Writing " + (fileSize - mid2) + " bytes to file " + file1);
       stm.write(fileContents, mid2, fileSize - mid2);
       System.out.println("Written second part of file");
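The assertion added to TestFileAppend2 only checks that getPos() is positive after the reopen. A slightly stricter variant would compare it against the file's length measured just before the append. The helper below sketches that idea; it is not part of this change, and the class and method names are invented.

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper: reopen a file for append and verify that getPos()
// resumes exactly at the file's current length.
class AppendPosCheck {
  static FSDataOutputStream appendAndCheckPos(FileSystem fs, Path file) throws IOException {
    long expected = fs.getFileStatus(file).getLen();
    FSDataOutputStream out = fs.append(file);
    if (out.getPos() != expected) {
      out.close();
      throw new AssertionError("getPos()=" + out.getPos() + " but file length was " + expected);
    }
    return out;
  }
}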