From: arp@apache.org
To: hdfs-commits@hadoop.apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Subject: svn commit: r1525659 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test: java/org/apache/hadoop/hdfs/TestDFSShell.java resources/testHDFSConf.xml
Date: Mon, 23 Sep 2013 17:57:23 -0000
Message-Id: <20130923175723.E275D23889E1@eris.apache.org>

Author: arp
Date: Mon Sep 23 17:57:23 2013
New Revision: 1525659

URL: http://svn.apache.org/r1525659
Log:
HDFS-5139. Remove redundant -R option from setrep.
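For context, -setrep already applies the requested replication factor to every file under a directory, which is what makes the -R flag redundant. Below is a minimal, illustrative sketch (not part of this commit) that drives the simplified command programmatically through FsShell, the same entry point the new test uses; the path /dir0 and the factor 2 are hypothetical examples.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;

public class SetrepExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FsShell shell = new FsShell(conf);
    // Equivalent to the shell invocation: hdfs dfs -setrep 2 /dir0
    // The factor is applied to /dir0 itself if it is a file, or to every
    // file underneath it if it is a directory; no -R flag is needed.
    int exitCode = shell.run(new String[] { "-setrep", "2", "/dir0" });
    shell.close();
    System.exit(exitCode);
  }
}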
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java?rev=1525659&r1=1525658&r2=1525659&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java Mon Sep 23 17:57:23 2013
@@ -61,6 +61,8 @@ import static org.junit.Assert.*;
 public class TestDFSShell {
   private static final Log LOG = LogFactory.getLog(TestDFSShell.class);
   private static AtomicInteger counter = new AtomicInteger();
+  private final int SUCCESS = 0;
+  private final int ERROR = 1;
 
   static final String TEST_ROOT_DIR = PathUtils.getTestDirName(TestDFSShell.class);
 
@@ -1619,9 +1621,6 @@ public class TestDFSShell {
   // force Copy Option is -f
   @Test (timeout = 30000)
   public void testCopyCommandsWithForceOption() throws Exception {
-    final int SUCCESS = 0;
-    final int ERROR = 1;
-
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
       .format(true).build();
@@ -1682,7 +1681,55 @@ public class TestDFSShell {
       }
       cluster.shutdown();
     }
+  }
+
+  // setrep for file and directory.
+  @Test (timeout = 30000)
+  public void testSetrep() throws Exception {
+
+    Configuration conf = new Configuration();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
+      .format(true).build();
+    FsShell shell = null;
+    FileSystem fs = null;
+
+    final String testdir1 = "/tmp/TestDFSShell-testSetrep-" +
+        counter.getAndIncrement();
+    final String testdir2 = testdir1 + "/nestedDir";
+    final Path hdfsFile1 = new Path(testdir1, "testFileForSetrep");
+    final Path hdfsFile2 = new Path(testdir2, "testFileForSetrep");
+    final Short oldRepFactor = new Short((short) 1);
+    final Short newRepFactor = new Short((short) 3);
+    try {
+      String[] argv;
+      cluster.waitActive();
+      fs = cluster.getFileSystem();
+      assertThat(fs.mkdirs(new Path(testdir2)), is(true));
+      shell = new FsShell(conf);
+
+      fs.create(hdfsFile1, true).close();
+      fs.create(hdfsFile2, true).close();
+
+      // Tests for setrep on a file.
+      argv = new String[] { "-setrep", newRepFactor.toString(), hdfsFile1.toString() };
+      assertThat(shell.run(argv), is(SUCCESS));
+      assertThat(fs.getFileStatus(hdfsFile1).getReplication(), is(newRepFactor));
+      assertThat(fs.getFileStatus(hdfsFile2).getReplication(), is(oldRepFactor));
+
+      // Tests for setrep on a directory and make sure it is applied recursively.
+      argv = new String[] { "-setrep", newRepFactor.toString(), testdir1 };
+      assertThat(shell.run(argv), is(SUCCESS));
+      assertThat(fs.getFileStatus(hdfsFile1).getReplication(), is(newRepFactor));
+      assertThat(fs.getFileStatus(hdfsFile2).getReplication(), is(newRepFactor));
+
+    } finally {
+      if (shell != null) {
+        shell.close();
+      }
+      cluster.shutdown();
+    }
   }
 
   /**

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml?rev=1525659&r1=1525658&r2=1525659&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml Mon Sep 23 17:57:23 2013
@@ -6049,7 +6049,7 @@
         -fs NAMENODE -mkdir /dir0
         -fs NAMENODE -touchz /dir0/file0
         -fs NAMENODE -touchz /dir0/file1
-        -fs NAMENODE -setrep -R 2 /dir0
+        -fs NAMENODE -setrep 2 /dir0
         -fs NAMENODE -rm -r /user
@@ -6072,7 +6072,7 @@
         -fs NAMENODE -mkdir -p dir0
         -fs NAMENODE -touchz dir0/file0
         -fs NAMENODE -touchz dir0/file1
-        -fs NAMENODE -setrep -R 2 dir0
+        -fs NAMENODE -setrep 2 dir0
         -fs NAMENODE -rm -r /user
@@ -6090,6 +6090,24 @@
+        setrep: -R ignored for existing file
+        -fs NAMENODE -mkdir -p dir0
+        -fs NAMENODE -touchz dir0/file0
+        -fs NAMENODE -setrep -R 2 dir0/file0
+        -fs NAMENODE -rm -r /user
+        RegexpComparator
+        ^Replication 2 set: dir0/file0
         setrep: non existent file (absolute path)
         -fs NAMENODE -setrep 2 /dir0/file
@@ -6145,7 +6163,7 @@
         -fs NAMENODE -mkdir hdfs:///dir0/
         -fs NAMENODE -touchz hdfs:///dir0/file0
         -fs NAMENODE -touchz hdfs:///dir0/file1
-        -fs NAMENODE -setrep -R 2 hdfs:///dir0
+        -fs NAMENODE -setrep 2 hdfs:///dir0
         -fs NAMENODE -rm -r hdfs:///*
@@ -6203,7 +6221,7 @@
         -fs NAMENODE -mkdir -p NAMENODE/dir0
         -fs NAMENODE -touchz NAMENODE/dir0/file0
         -fs NAMENODE -touchz NAMENODE/dir0/file1
-        -fs NAMENODE -setrep -R 2 NAMENODE/dir0
+        -fs NAMENODE -setrep 2 NAMENODE/dir0
         -fs NAMENODE -rm -r NAMENODE/*
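As a side note on the behavior exercised above, here is a minimal sketch (not part of this commit) of the equivalent operation through the FileSystem API rather than the shell. FileSystem#setReplication acts on a single file and does not recurse, so the directory walk that -setrep performs internally has to be written out by the caller; the path /dir0 and the target factor 2 are again hypothetical examples.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class SetReplicationByApi {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path dir = new Path("/dir0");   // hypothetical directory
    short replication = 2;          // hypothetical target factor

    // Walk every file under the directory and set its replication factor,
    // the per-file equivalent of "-setrep 2 /dir0".
    RemoteIterator<LocatedFileStatus> files = fs.listFiles(dir, true);
    while (files.hasNext()) {
      LocatedFileStatus file = files.next();
      fs.setReplication(file.getPath(), replication);
    }

    // Report the resulting replication factors, mirroring the
    // getFileStatus().getReplication() assertions in testSetrep.
    RemoteIterator<LocatedFileStatus> check = fs.listFiles(dir, true);
    while (check.hasNext()) {
      Path p = check.next().getPath();
      System.out.println(p + " -> " + fs.getFileStatus(p).getReplication());
    }
    fs.close();
  }
}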