From: tomwhite@apache.org
To: hdfs-commits@hadoop.apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Subject: svn commit: r956720 - in /hadoop/hdfs/branches/branch-0.21: ./ src/java/org/apache/hadoop/hdfs/ src/test/hdfs/org/apache/hadoop/hdfs/ src/test/hdfs/org/apache/hadoop/security/
Date: Mon, 21 Jun 2010 22:25:12 -0000

Author: tomwhite
Date: Mon Jun 21 22:25:11 2010
New Revision: 956720

URL: http://svn.apache.org/viewvc?rev=956720&view=rev
Log:
Merge -r 956718:956719 from trunk to branch-0.21. Fixes: HDFS-609.

Modified:
    hadoop/hdfs/branches/branch-0.21/CHANGES.txt
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/security/TestPermission.java
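For readers skimming the diffs below: after this merge, branch-0.21 again exposes only the boolean-overwrite create() overload on FileSystem, and the tests are updated to call it. A minimal sketch of a call against that signature (illustrative only, not part of the commit; the class and method names are made up, and the buffer size is read from the configuration the way the updated tests do):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class CreateExample {
      // Hypothetical helper: create (or truncate) a file using the restored
      // boolean-overwrite signature and write a few bytes to it.
      static void writeSmallFile(FileSystem fs, Configuration conf, Path path)
          throws IOException {
        FSDataOutputStream out = fs.create(path, FsPermission.getDefault(),
            true /* overwrite */,
            conf.getInt("io.file.buffer.size", 4096),
            fs.getDefaultReplication(), fs.getDefaultBlockSize(),
            null /* progress */);
        out.writeBytes("hello, hdfs");
        out.close();
      }
    }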
Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Mon Jun 21 22:25:11 2010
@@ -887,6 +887,9 @@ Release 0.21.0 - Unreleased
 
     HDFS-1000. Updates libhdfs to the new API for UGI (ddas)
 
+    HDFS-609. Create a file with the append flag does not work in HDFS.
+    (tomwhite)
+
 Release 0.20.3 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Mon Jun 21 22:25:11 2010
@@ -219,11 +219,12 @@ public class DistributedFileSystem exten
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
-    EnumSet<CreateFlag> flag, int bufferSize, short replication, long blockSize,
+    boolean overwrite, int bufferSize, short replication, long blockSize,
     Progressable progress) throws IOException {
     return new FSDataOutputStream(dfs.create(getPathName(f), permission,
-        flag, replication, blockSize, progress, bufferSize),
+        overwrite ? EnumSet.of(CreateFlag.OVERWRITE) : EnumSet.of(CreateFlag.CREATE),
+        replication, blockSize, progress, bufferSize),
         statistics);
   }
 
@@ -240,7 +241,6 @@ public class DistributedFileSystem exten
   /**
    * Same as create(), except fails if parent directory doesn't already exist.
-   * @see #create(Path, FsPermission, EnumSet, int, short, long, Progressable)
    */
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
       EnumSet<CreateFlag> flag, int bufferSize, short replication,

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java Mon Jun 21 22:25:11 2010
@@ -305,7 +305,7 @@ public class HftpFileSystem extends File
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
-      EnumSet<CreateFlag> flag, int bufferSize, short replication,
+      boolean overwrite, int bufferSize, short replication,
       long blockSize, Progressable progress) throws IOException {
     throw new IOException("Not supported");
   }

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java Mon Jun 21 22:25:11 2010
@@ -174,7 +174,7 @@ public class TestDFSPermission extends T
     // create the file/directory
     switch (op) {
     case CREATE:
-      FSDataOutputStream out = fs.create(name, permission, EnumSet.of(CreateFlag.OVERWRITE),
+      FSDataOutputStream out = fs.create(name, permission, true,
          conf.getInt("io.file.buffer.size", 4096),
          fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
      out.close();
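The DistributedFileSystem hunk above maps the restored boolean argument onto the internal CreateFlag set before delegating to DFSClient. As a standalone sketch of that mapping (the class and helper names are hypothetical; only the two EnumSet values come from the patch):

    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;

    public class OverwriteFlagMapping {
      // overwrite == true  -> OVERWRITE: create the file, or truncate it if it exists
      // overwrite == false -> CREATE: create the file, fail if it already exists
      static EnumSet<CreateFlag> toCreateFlags(boolean overwrite) {
        return overwrite ? EnumSet.of(CreateFlag.OVERWRITE)
                         : EnumSet.of(CreateFlag.CREATE);
      }
    }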
Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java Mon Jun 21 22:25:11 2010
@@ -56,7 +56,7 @@ public class TestFSInputChecker extends 
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
     // create and write a file that contains three blocks of data
     FSDataOutputStream stm = fileSys.create(name, new FsPermission((short)0777),
-        EnumSet.of(CreateFlag.OVERWRITE), fileSys.getConf().getInt("io.file.buffer.size", 4096),
+        true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
        NUM_OF_DATANODES, BLOCK_SIZE, null);
     stm.write(expected);
     stm.close();
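The TestFileCreation hunk below removes testFileCreationWithFlags(). For quick reference, a condensed recap of the flag combinations it exercised and the outcomes its comments describe (illustrative constants only; the EnumSet<CreateFlag> overload being tested is the one this commit takes off the branch):

    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;

    public class RemovedTestRecap {
      // APPEND on a file that does not exist yet     -> IOException
      static final EnumSet<CreateFlag> APPEND_ONLY = EnumSet.of(CreateFlag.APPEND);
      // APPEND + CREATE on an existing file          -> no exception
      static final EnumSet<CreateFlag> APPEND_OR_CREATE =
          EnumSet.of(CreateFlag.APPEND, CreateFlag.CREATE);
      // CREATE on a file that already exists         -> IOException
      static final EnumSet<CreateFlag> CREATE_ONLY = EnumSet.of(CreateFlag.CREATE);
      // OVERWRITE, alone or with CREATE or APPEND    -> truncates/recreates the file
      static final EnumSet<CreateFlag> OVERWRITE_ANY = EnumSet.of(CreateFlag.OVERWRITE);
    }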
Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java Mon Jun 21 22:25:11 2010
@@ -611,91 +611,6 @@ public class TestFileCreation extends ju
   }
 
   /**
-   * Test file creation with all supported flags.
-   */
-  public void testFileCreationWithFlags() throws IOException {
-    Configuration conf = new HdfsConfiguration();
-    if (simulatedStorage) {
-      conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
-    }
-    MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
-    FileSystem fs = cluster.getFileSystem();
-    Path path = new Path("/" + System.currentTimeMillis()
-        + "-testFileCreationWithFlags");
-    FSDataOutputStream out = null;
-
-    // append to a non-exist file, it should throw an IOException
-    try {
-      IOException expectedException = null;
-      EnumSet<CreateFlag> appendNoFile = EnumSet.of(CreateFlag.APPEND);
-      // this should throw a IOException, because the file does not exist
-      try {
-        out = createFileWithFlag(fs, path, 1, appendNoFile);
-      } catch (IOException e) {
-        expectedException = e;
-      } finally {
-        if (out != null)
-          out.close();
-      }
-      assertTrue(
-          "Append a non-exists file with no create flag should throw an IOException ",
-          expectedException != null);
-
-      // the file already exists, and recreate it with CreateFlag.APPEND,
-      // CreateFlag.CREATE. It will not throw any exception.
-      EnumSet<CreateFlag> appendAndCreate = EnumSet.of(CreateFlag.APPEND,
-          CreateFlag.CREATE);
-      out = createFileWithFlag(fs, path, 1, appendAndCreate);
-      out.close();
-
-      // the file already exists, and recreate it only with CreateFlag.CREATE
-      // flag. it should throw an IOException
-      expectedException = null;
-      EnumSet<CreateFlag> createExistsFile = EnumSet.of(CreateFlag.CREATE);
-      // this should throw a IOException, because the file already exists
-      try {
-        createFileWithFlag(fs, path, 1, createExistsFile);
-      } catch (IOException e) {
-        expectedException = e;
-      }
-      assertTrue(
-          "create a file which already exists should throw an IOException ",
-          expectedException != null);
-
-      // the file exists, recreate it with the flag of CreateFlag.OVERWRITE.
-      EnumSet<CreateFlag> overwriteFile = EnumSet.of(CreateFlag.OVERWRITE);
-      out = createFileWithFlag(fs, path, 1, overwriteFile);
-      out.close();
-
-      // the file exists, recreate it with the flag of CreateFlag.OVERWRITE
-      // together with CreateFlag.CREATE. It has the same effect as only specify
-      // CreateFlag.OVERWRITE.
-      EnumSet<CreateFlag> overwriteWithCreateFile = EnumSet.of(
-          CreateFlag.OVERWRITE, CreateFlag.CREATE);
-      out = createFileWithFlag(fs, path, 1, overwriteWithCreateFile);
-      out.close();
-
-      // the file exists, recreate it with the flag of CreateFlag.OVERWRITE
-      // together with CreateFlag.APPEND. It has the same effect as only specify
-      // CreateFlag.OVERWRITE.
-      EnumSet<CreateFlag> overwriteWithAppendFile = EnumSet.of(
-          CreateFlag.OVERWRITE, CreateFlag.APPEND);
-      out = createFileWithFlag(fs, path, 1, overwriteWithAppendFile);
-      out.close();
-
-      fs.delete(path, true);
-
-      EnumSet<CreateFlag> createNonExistsFile = EnumSet.of(CreateFlag.CREATE,
-          CreateFlag.OVERWRITE);
-      out = createFileWithFlag(fs, path, 1, createNonExistsFile);
-      out.close();
-      fs.delete(path, true);
-    } finally {
-      cluster.shutdown();
-    }
-  }
-
-  /**
    * Test file creation using createNonRecursive().
    */
   public void testFileCreationNonRecursive() throws IOException {

@@ -788,14 +703,6 @@ public class TestFileCreation extends ju
     return stm;
   }
 
-  // creates a file with the flag api
-  static FSDataOutputStream createFileWithFlag(FileSystem fileSys, Path name, int repl, EnumSet<CreateFlag> flag)
-      throws IOException {
-    System.out.println("createFile: Created " + name + " with " + repl + " replica.");
-    FSDataOutputStream stm = fileSys.create(name, FsPermission.getDefault(), flag,
-        fileSys.getConf().getInt("io.file.buffer.size", 4096), (short)repl, (long)blockSize, null);
-    return stm;
-  }
 
   /**
    * Test that file data becomes available before file is closed.

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/security/TestPermission.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/security/TestPermission.java?rev=956720&r1=956719&r2=956720&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/security/TestPermission.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/security/TestPermission.java Mon Jun 21 22:25:11 2010
@@ -117,7 +117,7 @@ public class TestPermission extends Test
       FsPermission filePerm = new FsPermission((short)0444);
       FSDataOutputStream out = fs.create(new Path("/b1/b2/b3.txt"), filePerm,
-          EnumSet.of(CreateFlag.OVERWRITE), conf.getInt("io.file.buffer.size", 4096),
+          true, conf.getInt("io.file.buffer.size", 4096),
          fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
      out.write(123);
      out.close();
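With the EnumSet<CreateFlag> overload gone from FileSystem on branch-0.21, a client that relied on CREATE|APPEND semantics has to do the existence check itself. A minimal sketch under those assumptions (the class and helper names are hypothetical; append() also requires the cluster to support appends):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class AppendOrCreate {
      // Append to the file if it already exists, otherwise create it;
      // overwrite = false so a concurrently created file is not clobbered.
      static FSDataOutputStream appendOrCreate(FileSystem fs, Path path)
          throws IOException {
        return fs.exists(path) ? fs.append(path) : fs.create(path, false);
      }
    }

Note that this is only a client-side approximation of the old CREATE|APPEND pair: the exists()/append() sequence is not atomic, so a racing writer can still slip in between the two calls.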