Mailing-List: contact hdfs-commits-help@hadoop.apache.org; run by ezmlm
Reply-To: hdfs-dev@hadoop.apache.org
To: hdfs-commits@hadoop.apache.org
From: suresh@apache.org
Subject: svn commit: r1476610 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/test/java/org/apache/hadoop/hdfs/server/namenode/
Date: Sat, 27 Apr 2013 16:16:19 -0000
Message-Id: <20130427161620.2F6C023888E3@eris.apache.org>

Author: suresh
Date: Sat Apr 27 16:16:19 2013
New Revision: 1476610

URL: http://svn.apache.org/r1476610
Log:
HDFS-4705. Address HDFS test failures on Windows because of invalid
dfs.namenode.name.dir. Contributed by Ivan Mitic.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAllowFormat.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sat Apr 27 16:16:19 2013
@@ -323,6 +323,9 @@ Trunk (Unreleased)
     HDFS-4722. TestGetConf#testFederation times out on Windows.
     (Ivan Mitic via suresh)
 
+    HDFS-4705. Address HDFS test failures on Windows because of invalid
+    dfs.namenode.name.dir. (Ivan Mitic via suresh)
+
   BREAKDOWN OF HDFS-347 SUBTASKS AND RELATED JIRAS
 
     HDFS-4353. Encapsulate connections to peers in Peer and PeerServer classes.
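[Editor's note] Every test hunk below applies the same fix: rather than leaving dfs.namenode.name.dir unset (the commit log calls the resulting value invalid on Windows), each test points the key at an explicit absolute path under the shared test base directory before formatting the NameNode. A minimal sketch of that pattern, built only from calls that appear in this patch; the class and method names here are illustrative and not part of the commit:

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.DFSTestUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.Test;

    // Illustrative class name only; not part of the patch.
    public class NameDirConfigSketch {
      @Test
      public void formatWithExplicitNameDir() throws Exception {
        Configuration conf = new HdfsConfiguration();
        // Place the name dir under the per-run test base directory and pass
        // it to the config as an absolute local path, which resolves cleanly
        // on both Windows and Unix.
        File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
        conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
            nameDir.getAbsolutePath());
        // Tests that format the NameNode directly (without MiniDFSCluster
        // managing storage) now write into that directory.
        DFSTestUtil.formatNameNode(conf);
      }
    }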
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAllowFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAllowFormat.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAllowFormat.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAllowFormat.java Sat Apr 27 16:16:19 2013
@@ -169,6 +169,8 @@ public class TestAllowFormat {
     InetSocketAddress nnAddr2 = new InetSocketAddress(localhost, 9020);
     HATestUtil.setFailoverConfigurations(conf, logicalName, nnAddr1, nnAddr2);
+    conf.set(DFS_NAMENODE_NAME_DIR_KEY,
+        new File(hdfsDir, "name").getAbsolutePath());
     conf.setBoolean(DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY, true);
     conf.set(DFSUtil.addKeySuffixes(
         DFSConfigKeys.DFS_NAMENODE_EDITS_PLUGIN_PREFIX, "dummy"),

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java Sat Apr 27 16:16:19 2013
@@ -750,9 +750,12 @@ public class TestCheckpoint {
   @Test
   public void testSeparateEditsDirLocking() throws IOException {
     Configuration conf = new HdfsConfiguration();
-    File editsDir = new File(MiniDFSCluster.getBaseDirectory() +
-        "/testSeparateEditsDirLocking");
-
+    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+    File editsDir = new File(MiniDFSCluster.getBaseDirectory(),
+        "testSeparateEditsDirLocking");
+
+    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+        nameDir.getAbsolutePath());
     conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
         editsDir.getAbsolutePath());
     MiniDFSCluster cluster = null;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java Sat Apr 27 16:16:19 2013
@@ -19,20 +19,30 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY;
 import static org.junit.Assert.assertEquals;
 
+import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.util.Collection;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
+import org.junit.After;
 import org.junit.Test;
 
 public class TestFSNamesystem {
 
+  @After
+  public void cleanUp() {
+    FileUtil.fullyDeleteContents(new File(MiniDFSCluster.getBaseDirectory()));
+  }
+
   /**
    * Tests that the namenode edits dirs are gotten with duplicates removed
    */
@@ -54,6 +64,9 @@ public class TestFSNamesystem {
   @Test
   public void testFSNamespaceClearLeases() throws Exception {
     Configuration conf = new HdfsConfiguration();
+    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+    conf.set(DFS_NAMENODE_NAME_DIR_KEY, nameDir.getAbsolutePath());
+
     NameNode.initMetrics(conf, NamenodeRole.NAMENODE);
     DFSTestUtil.formatNameNode(conf);
     FSNamesystem fsn = FSNamesystem.loadFromDisk(conf);
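[Editor's note] The TestFSNamesystem change above, and the TestNNThroughputBenchmark and TestValidateConfigurationSettings changes below, also add an @After hook. Because these tests now format NameNode storage directly under MiniDFSCluster.getBaseDirectory() instead of letting MiniDFSCluster create and remove it, clearing that directory after each test keeps one test's image, edits, and lock files from leaking into the next; that rationale is my reading of the change, not something stated in the commit. The hook, shown in isolation:

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.After;

    // Runs after every test method; deletes everything under the shared test
    // base directory (the directory itself is left in place).
    @After
    public void cleanUp() {
      FileUtil.fullyDeleteContents(new File(MiniDFSCluster.getBaseDirectory()));
    }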
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNNThroughputBenchmark.java Sat Apr 27 16:16:19 2013
@@ -17,23 +17,35 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import java.io.File;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
 import org.junit.Test;
 
 public class TestNNThroughputBenchmark {
 
+  @After
+  public void cleanUp() {
+    FileUtil.fullyDeleteContents(new File(MiniDFSCluster.getBaseDirectory()));
+  }
+
   /**
    * This test runs all benchmarks defined in {@link NNThroughputBenchmark}.
   */
  @Test
  public void testNNThroughput() throws Exception {
    Configuration conf = new HdfsConfiguration();
+    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+        nameDir.getAbsolutePath());
    FileSystem.setDefaultUri(conf, "hdfs://localhost:" + 0);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
    DFSTestUtil.formatNameNode(conf);
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java Sat Apr 27 16:16:19 2013
@@ -322,12 +322,15 @@ public class TestNameEditsConfigs {
     MiniDFSCluster cluster = null;
     File nameAndEditsDir = new File(base_dir, "name_and_edits");
     File nameAndEditsDir2 = new File(base_dir, "name_and_edits2");
+    File nameDir = new File(base_dir, "name");
 
     // 1
     // Bad configuration. Add a directory to dfs.namenode.edits.dir.required
     // without adding it to dfs.namenode.edits.dir.
     try {
       Configuration conf = new HdfsConfiguration();
+      conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+          nameDir.getAbsolutePath());
       conf.set(
           DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY,
           nameAndEditsDir2.toURI().toString());
@@ -353,6 +356,8 @@ public class TestNameEditsConfigs {
     // and dfs.namenode.edits.dir.
     try {
       Configuration conf = new HdfsConfiguration();
+      conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+          nameDir.getAbsolutePath());
       conf.setStrings(
           DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
           nameAndEditsDir.toURI().toString(),
@@ -375,6 +380,8 @@ public class TestNameEditsConfigs {
     // dfs.namenode.edits.dir.required.
     try {
       Configuration conf = new HdfsConfiguration();
+      conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+          nameDir.getAbsolutePath());
       conf.setStrings(
           DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
           nameAndEditsDir.toURI().toString(),
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java?rev=1476610&r1=1476609&r2=1476610&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java Sat Apr 27 16:16:19 2013
@@ -31,6 +31,7 @@ import org.apache.hadoop.hdfs.DFSTestUti
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
 import org.junit.Test;
 
 /**
@@ -39,6 +40,11 @@ import org.junit.Test;
  */
 public class TestValidateConfigurationSettings {
 
+  @After
+  public void cleanUp() {
+    FileUtil.fullyDeleteContents(new File(MiniDFSCluster.getBaseDirectory()));
+  }
+
   /**
    * Tests setting the rpc port to the same as the web port to test that
    * an exception
@@ -49,6 +55,10 @@ public class TestValidateConfigurationSe
       throws IOException {
 
     Configuration conf = new HdfsConfiguration();
+    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+        nameDir.getAbsolutePath());
+
     // set both of these to port 9000, should fail
     FileSystem.setDefaultUri(conf, "hdfs://localhost:9000");
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
@@ -72,6 +82,10 @@ public class TestValidateConfigurationSe
       throws IOException {
 
     Configuration conf = new HdfsConfiguration();
+    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+        nameDir.getAbsolutePath());
+
     FileSystem.setDefaultUri(conf, "hdfs://localhost:8000");
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
     DFSTestUtil.formatNameNode(conf);