Return-Path: X-Original-To: apmail-hadoop-hdfs-commits-archive@minotaur.apache.org Delivered-To: apmail-hadoop-hdfs-commits-archive@minotaur.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 0379C10690 for ; Mon, 30 Dec 2013 19:36:52 +0000 (UTC) Received: (qmail 33969 invoked by uid 500); 30 Dec 2013 19:36:51 -0000 Delivered-To: apmail-hadoop-hdfs-commits-archive@hadoop.apache.org Received: (qmail 33932 invoked by uid 500); 30 Dec 2013 19:36:51 -0000 Mailing-List: contact hdfs-commits-help@hadoop.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: hdfs-dev@hadoop.apache.org Delivered-To: mailing list hdfs-commits@hadoop.apache.org Received: (qmail 33924 invoked by uid 99); 30 Dec 2013 19:36:51 -0000 Received: from nike.apache.org (HELO nike.apache.org) (192.87.106.230) by apache.org (qpsmtpd/0.29) with ESMTP; Mon, 30 Dec 2013 19:36:51 +0000 X-ASF-Spam-Status: No, hits=-2000.0 required=5.0 tests=ALL_TRUSTED X-Spam-Check-By: apache.org Received: from [140.211.11.4] (HELO eris.apache.org) (140.211.11.4) by apache.org (qpsmtpd/0.29) with ESMTP; Mon, 30 Dec 2013 19:36:48 +0000 Received: from eris.apache.org (localhost [127.0.0.1]) by eris.apache.org (Postfix) with ESMTP id 7DD1E2388906; Mon, 30 Dec 2013 19:36:27 +0000 (UTC) Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: svn commit: r1554297 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java Date: Mon, 30 Dec 2013 19:36:27 -0000 To: hdfs-commits@hadoop.apache.org From: cmccabe@apache.org X-Mailer: svnmailer-1.0.9 Message-Id: <20131230193627.7DD1E2388906@eris.apache.org> X-Virus-Checked: Checked by ClamAV on apache.org Author: cmccabe Date: Mon Dec 30 19:36:26 2013 New Revision: 1554297 URL: 
http://svn.apache.org/r1554297 Log: HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish via cmccabe) Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1554297&r1=1554296&r2=1554297&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Mon Dec 30 19:36:26 2013 @@ -435,6 +435,9 @@ Release 2.3.0 - UNRELEASED HDFS-5661. Browsing FileSystem via web ui, should use datanode's fqdn instead of ip address. (Benoy Antony via jing9) + HDFS-5582. 
hdfs getconf -excludeFile or -includeFile always failed (sathish + via cmccabe) + Release 2.2.0 - 2013-10-13 INCOMPATIBLE CHANGES Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java?rev=1554297&r1=1554296&r2=1554297&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java Mon Dec 30 19:36:26 2013 @@ -29,6 +29,7 @@ import java.util.Map; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress; @@ -85,9 +86,9 @@ public class GetConf extends Configured map.put(BACKUP.getName().toLowerCase(), new BackupNodesCommandHandler()); map.put(INCLUDE_FILE.getName().toLowerCase(), - new CommandHandler("DFSConfigKeys.DFS_HOSTS")); + new CommandHandler(DFSConfigKeys.DFS_HOSTS)); map.put(EXCLUDE_FILE.getName().toLowerCase(), - new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE")); + new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE)); map.put(NNRPCADDRESSES.getName().toLowerCase(), new NNRpcAddressesCommandHandler()); map.put(CONFKEY.getName().toLowerCase(), Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java?rev=1554297&r1=1554296&r2=1554297&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java Mon Dec 30 19:36:26 2013 @@ -33,10 +33,15 @@ import java.io.PrintStream; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.StringTokenizer; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress; import org.apache.hadoop.hdfs.HdfsConfiguration; @@ -55,7 +60,7 @@ public class TestGetConf { enum TestType { NAMENODE, BACKUP, SECONDARY, NNRPCADDRESSES } - + FileSystem localFileSys; /** Setup federation nameServiceIds in the configuration */ private void setupNameServices(HdfsConfiguration conf, int nameServiceIdCount) { StringBuilder nsList = new StringBuilder(); @@ -379,4 +384,70 @@ public class TestGetConf { } } } + @Test + public void TestGetConfExcludeCommand() throws Exception{ + HdfsConfiguration conf = new HdfsConfiguration(); + // Set up the hosts/exclude files. 
+ localFileSys = FileSystem.getLocal(conf); + Path workingDir = localFileSys.getWorkingDirectory(); + Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/"); + Path hostsFile = new Path(dir, "hosts"); + Path excludeFile = new Path(dir, "exclude"); + + // Setup conf + conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath()); + conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath()); + writeConfigFile(hostsFile, null); + writeConfigFile(excludeFile, null); + String[] args = {"-excludeFile"}; + String ret = runTool(conf, args, true); + assertEquals(excludeFile.toUri().getPath(),ret.trim()); + cleanupFile(localFileSys, excludeFile.getParent()); + } + + @Test + public void TestGetConfIncludeCommand() throws Exception{ + HdfsConfiguration conf = new HdfsConfiguration(); + // Set up the hosts/exclude files. + localFileSys = FileSystem.getLocal(conf); + Path workingDir = localFileSys.getWorkingDirectory(); + Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/"); + Path hostsFile = new Path(dir, "hosts"); + Path excludeFile = new Path(dir, "exclude"); + + // Setup conf + conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath()); + conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath()); + writeConfigFile(hostsFile, null); + writeConfigFile(excludeFile, null); + String[] args = {"-includeFile"}; + String ret = runTool(conf, args, true); + assertEquals(hostsFile.toUri().getPath(),ret.trim()); + cleanupFile(localFileSys, excludeFile.getParent()); + } + + private void writeConfigFile(Path name, ArrayList nodes) + throws IOException { + // delete if it already exists + if (localFileSys.exists(name)) { + localFileSys.delete(name, true); + } + + FSDataOutputStream stm = localFileSys.create(name); + + if (nodes != null) { + for (Iterator it = nodes.iterator(); it.hasNext();) { + String node = it.next(); + stm.writeBytes(node); + 
stm.writeBytes("\n"); + } + } + stm.close(); + } + + private void cleanupFile(FileSystem fileSys, Path name) throws IOException { + assertTrue(fileSys.exists(name)); + fileSys.delete(name, true); + assertTrue(!fileSys.exists(name)); + } }