Reply-To: mapreduce-dev@hadoop.apache.org
Subject: svn commit: r1390642 - in /hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop: mapred/ mapreduce/lib/input/
Date: Wed, 26 Sep 2012 18:32:26 -0000
To: mapreduce-commits@hadoop.apache.org
From: acmurthy@apache.org
X-Mailer: svnmailer-1.0.8-patched
Message-Id: <20120926183226.E839823888CD@eris.apache.org>

Author: acmurthy
Date: Wed Sep 26 18:32:24 2012
New Revision: 1390642

URL: http://svn.apache.org/viewvc?rev=1390642&view=rev
Log:
HDFS-3910. Revert from branch-2.0.2-alpha.
Modified:
    hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
    hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
    hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
    hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java

Modified: hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java?rev=1390642&r1=1390641&r2=1390642&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java (original)
+++ hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java Wed Sep 26 18:32:24 2012
@@ -23,7 +23,6 @@ import static org.mockito.Mockito.when;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
-import java.util.concurrent.TimeoutException;
 
 import junit.framework.TestCase;
 
@@ -96,7 +95,7 @@ public class TestFileInputFormat extends
   }
 
   private void createInputs(FileSystem fs, Path inDir, String fileName)
-      throws IOException, TimeoutException, InterruptedException {
+      throws IOException {
     // create a multi-block file on hdfs
     Path path = new Path(inDir, fileName);
     final short replication = 2;
@@ -158,7 +157,7 @@ public class TestFileInputFormat extends
     }
   }
 
-  public void testMultiLevelInput() throws Exception {
+  public void testMultiLevelInput() throws IOException {
     JobConf job = new JobConf(conf);
     job.setBoolean("dfs.replication.considerLoad", false);
 
@@ -292,8 +291,7 @@ public class TestFileInputFormat extends
   }
 
   static void writeFile(Configuration conf, Path name,
-      short replication, int numBlocks)
-      throws IOException, TimeoutException, InterruptedException {
+      short replication, int numBlocks) throws IOException {
     FileSystem fileSys = FileSystem.get(conf);
 
     FSDataOutputStream stm = fileSys.create(name, true,

Modified: hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java?rev=1390642&r1=1390641&r2=1390642&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java (original)
+++ hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java Wed Sep 26 18:32:24 2012
@@ -71,13 +71,13 @@ public class TestMultipleLevelCaching ex
     return rack.toString();
   }
 
-  public void testMultiLevelCaching() throws Exception {
+  public void testMultiLevelCaching() throws IOException {
     for (int i = 1 ; i <= MAX_LEVEL; ++i) {
       testCachingAtLevel(i);
     }
   }
 
-  private void testCachingAtLevel(int level) throws Exception {
+  private void testCachingAtLevel(int level) throws IOException {
     String namenode = null;
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

Modified: hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java?rev=1390642&r1=1390641&r2=1390642&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java Wed Sep 26 18:32:24 2012
@@ -31,7 +31,6 @@ import java.util.Enumeration;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
-import java.util.concurrent.TimeoutException;
 
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -450,14 +449,11 @@ public class UtilsForTests {
   static void signalTasks(MiniDFSCluster dfs, FileSystem fileSys,
                           String mapSignalFile,
                           String reduceSignalFile, int replication)
-      throws IOException, TimeoutException {
-    try {
-      writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(mapSignalFile),
-                (short)replication);
-      writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(reduceSignalFile), (short)replication);
-    } catch (InterruptedException ie) {
-      // Ignore
-    }
+      throws IOException {
+    writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(mapSignalFile),
+              (short)replication);
+    writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(reduceSignalFile),
+              (short)replication);
   }
 
   /**
@@ -466,16 +462,12 @@ public class UtilsForTests {
   static void signalTasks(MiniDFSCluster dfs, FileSystem fileSys,
                           boolean isMap, String mapSignalFile,
                           String reduceSignalFile)
-      throws IOException, TimeoutException {
-    try {
-      // signal the maps to complete
-      writeFile(dfs.getNameNode(), fileSys.getConf(),
-                isMap
-                ? new Path(mapSignalFile)
-                : new Path(reduceSignalFile), (short)1);
-    } catch (InterruptedException ie) {
-      // Ignore
-    }
+      throws IOException {
+    // signal the maps to complete
+    writeFile(dfs.getNameNode(), fileSys.getConf(),
+              isMap
+              ? new Path(mapSignalFile)
+              : new Path(reduceSignalFile), (short)1);
   }
 
   static String getSignalFile(Path dir) {
@@ -491,8 +483,7 @@ public class UtilsForTests {
   }
 
   static void writeFile(NameNode namenode, Configuration conf, Path name,
-                        short replication)
-      throws IOException, TimeoutException, InterruptedException {
+                        short replication) throws IOException {
     FileSystem fileSys = FileSystem.get(conf);
     SequenceFile.Writer writer = 
       SequenceFile.createWriter(fileSys, conf, name,

Modified: hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java?rev=1390642&r1=1390641&r2=1390642&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java (original)
+++ hadoop/common/branches/branch-2.0.2-alpha/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java Wed Sep 26 18:32:24 2012
@@ -23,7 +23,6 @@ import java.net.URI;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.zip.GZIPOutputStream;
-import java.util.concurrent.TimeoutException;
 
 import junit.framework.TestCase;
 
@@ -279,7 +278,7 @@ public class TestCombineFileInputFormat
     assertFalse(rr.nextKeyValue());
   }
 
-  public void testSplitPlacement() throws Exception {
+  public void testSplitPlacement() throws IOException {
     MiniDFSCluster dfs = null;
     FileSystem fileSys = null;
     try {
@@ -679,8 +678,7 @@ public class TestCombineFileInputFormat
   }
 
   static void writeFile(Configuration conf, Path name,
-      short replication, int numBlocks)
-      throws IOException, TimeoutException, InterruptedException {
+      short replication, int numBlocks) throws IOException {
     FileSystem fileSys = FileSystem.get(conf);
 
     FSDataOutputStream stm = fileSys.create(name, true,
@@ -691,8 +689,7 @@ public class TestCombineFileInputFormat
 
   // Creates the gzip file and return the FileStatus
   static FileStatus writeGzipFile(Configuration conf, Path name,
-      short replication, int numBlocks)
-      throws IOException, TimeoutException, InterruptedException {
+      short replication, int numBlocks) throws IOException {
     FileSystem fileSys = FileSystem.get(conf);
 
     GZIPOutputStream out = new GZIPOutputStream(fileSys.create(name, true, conf
@@ -702,8 +699,7 @@ public class TestCombineFileInputFormat
   }
 
   private static void writeDataAndSetReplication(FileSystem fileSys, Path name,
-      OutputStream out, short replication, int numBlocks)
-      throws IOException, TimeoutException, InterruptedException {
+      OutputStream out, short replication, int numBlocks) throws IOException {
     for (int i = 0; i < numBlocks; i++) {
       out.write(databuf);
     }
@@ -711,7 +707,7 @@
     DFSTestUtil.waitReplication(fileSys, name, replication);
   }
 
-  public void testSplitPlacementForCompressedFiles() throws Exception {
+  public void testSplitPlacementForCompressedFiles() throws IOException {
     MiniDFSCluster dfs = null;
     FileSystem fileSys = null;
     try {
@@ -1062,7 +1058,7 @@ public class TestCombineFileInputFormat
   /**
    * Test that CFIF can handle missing blocks.
   */
-  public void testMissingBlocks() throws Exception {
+  public void testMissingBlocks() throws IOException {
    String namenode = null;
    MiniDFSCluster dfs = null;
    FileSystem fileSys = null;
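
For readers skimming the diff above: a minimal sketch of the shape that the writeFile test helper takes after this revert. It is not part of the commit; only the signature change (throws IOException instead of IOException/TimeoutException/InterruptedException) is taken from the diff, while the wrapper class name, block size, and buffer values are assumptions chosen for illustration.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;

class WriteFileSketch {
  // Post-revert shape: the helper declares only IOException, so callers no
  // longer need to declare or swallow TimeoutException/InterruptedException.
  static void writeFile(Configuration conf, Path name,
                        short replication, int numBlocks) throws IOException {
    FileSystem fileSys = FileSystem.get(conf);
    // Small, assumed block size so the file spans numBlocks blocks.
    final long blockSize = 4096L;
    FSDataOutputStream stm = fileSys.create(name, true,
        conf.getInt("io.file.buffer.size", 4096), replication, blockSize);
    byte[] databuf = new byte[(int) blockSize];
    for (int i = 0; i < numBlocks; i++) {
      stm.write(databuf);
    }
    stm.close();
    // With the pre-HDFS-3910 DFSTestUtil, waitReplication declares only
    // IOException, which is why the throws clauses above could be trimmed.
    DFSTestUtil.waitReplication(fileSys, name, replication);
  }
}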