Subject: svn commit: r763107 - in /hadoop/core/trunk: ./ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/fs/ftp/ src/core/org/apache/hadoop/fs/kfs/ src/core/org/apache/hadoop/fs/s3/ src...
Date: Wed, 08 Apr 2009 05:16:47 -0000
To: core-commits@hadoop.apache.org
From: omalley@apache.org
Reply-To: core-dev@hadoop.apache.org

Author: omalley
Date: Wed Apr 8 05:16:47 2009
New Revision: 763107

URL: http://svn.apache.org/viewvc?rev=763107&view=rev
Log:
HADOOP-5585. Clear FileSystem statistics between tasks when jvm-reuse
is enabled. (omalley)
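The gist of the change, for readers skimming the diff below: each FileSystem class now owns one Statistics object that knows its URI scheme and can be reset, and the child task runner zeroes all of them before starting a task, so a reused JVM does not carry the previous task's byte counts into the next task's counters. A minimal standalone sketch of that idea, assuming nothing from Hadoop (SimpleStats and the loop are illustrative, not code from this patch):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative sketch only -- mirrors the shape of the patched
    // FileSystem.Statistics, not the real Hadoop class.
    class SimpleStats {
      private final String scheme;
      private final AtomicLong bytesRead = new AtomicLong();

      SimpleStats(String scheme) { this.scheme = scheme; }

      void incrementBytesRead(long n) { bytesRead.addAndGet(n); }

      // reset() is the key addition: zero the counters between tasks.
      void reset() { bytesRead.set(0); }

      public String toString() {
        return scheme + ": " + bytesRead.get() + " bytes read";
      }
    }

    public class JvmReuseSketch {
      public static void main(String[] args) {
        List<SimpleStats> registry = new ArrayList<SimpleStats>();
        SimpleStats hdfs = new SimpleStats("hdfs");
        registry.add(hdfs);

        for (int task = 0; task < 2; task++) {
          // Without this loop, task 1 would report task 0's bytes too.
          for (SimpleStats s : registry) { s.reset(); }
          hdfs.incrementBytesRead(100);      // simulated task I/O
          System.out.println("task " + task + " -> " + hdfs);
        }
      }
    }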
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java
    hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRDFSSort.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Wed Apr 8 05:16:47 2009
@@ -1207,6 +1207,9 @@
     HADOOP-5068. Fix NPE in TestCapacityScheduler.
     (Vinod Kumar Vavilapalli via szetszwo)
 
+    HADOOP-5585. Clear FileSystem statistics between tasks when jvm-reuse
+    is enabled. (omalley)
+
 Release 0.19.2 - Unreleased
 
   BUG FIXES

Modified: hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java (original)
+++ hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java Wed Apr 8 05:16:47 2009
@@ -31,6 +31,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -117,11 +118,7 @@
       throws IOException {
     Configuration config = new Configuration(false);
 
-    LocalFileSystem fs = new LocalFileSystem();
-    fs.setConf(config);
-    // The above call doesn't set the configuration for the underlying
-    // RawFileSystem. Explicitly doing it.
-    fs.getRawFileSystem().setConf(config);
+    LocalFileSystem fs = FileSystem.getLocal(config);
 
     String myResourcePath = System.getProperty("test.build.data");
     Path schedulerConfigFilePath =
@@ -146,9 +143,7 @@
 
   private void cleanUpSchedulerConfigFile() throws IOException {
     Configuration config = new Configuration(false);
-    LocalFileSystem fs = new LocalFileSystem();
-    fs.setConf(config);
-    fs.getRawFileSystem().setConf(config);
+    LocalFileSystem fs = FileSystem.getLocal(config);
 
     String myResourcePath = System.getProperty("test.build.data");
     Path schedulerConfigFilePath =
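The test change above swaps a hand-built LocalFileSystem, which bypassed initialize() and therefore never registered a Statistics object, for FileSystem.getLocal(config). A minimal sketch of the difference, assuming a Hadoop 0.20-era classpath (the class name GetLocalSketch is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;

    public class GetLocalSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration(false);

        // Preferred: getLocal() goes through initialize(), which (after
        // this patch) registers the per-scheme Statistics and configures
        // the wrapped RawLocalFileSystem as well.
        LocalFileSystem fs = FileSystem.getLocal(conf);

        // Discouraged: direct construction skips initialize(), so setConf()
        // must be called by hand on both layers -- the pattern this commit
        // removes from the tests.
        LocalFileSystem manual = new LocalFileSystem();
        manual.setConf(conf);
        manual.getRawFileSystem().setConf(conf);
      }
    }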
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java Wed Apr 8 05:16:47 2009
@@ -23,6 +23,7 @@
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.IdentityHashMap;
 import java.util.Iterator;
@@ -79,10 +80,6 @@
     statisticsTable =
       new IdentityHashMap<Class<? extends FileSystem>, Statistics>();
 
-  /** Recording statistics per FileSystem URI scheme */
-  private static final Map<String, Statistics> statsByUriScheme =
-    new HashMap<String, Statistics>();
-
   /**
    * The statistics for this file system.
    */
@@ -128,8 +125,9 @@
    *   for this FileSystem
    * @param conf the configuration
    */
-  public abstract void initialize(URI name, Configuration conf)
-    throws IOException;
+  public void initialize(URI name, Configuration conf) throws IOException {
+    statistics = getStatistics(name.getScheme(), getClass());
+  }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
   public abstract URI getUri();
@@ -303,7 +301,6 @@
 
   protected FileSystem() {
     super(null);
-    statistics = getStatistics(this.getClass());
   }
 
   /** Check that a Path belongs to this FileSystem. */
@@ -1407,7 +1404,6 @@
       }
       FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf);
       fs.initialize(uri, conf);
-      statsByUriScheme.put(uri.getScheme(), fs.statistics);
       return fs;
     }
 
@@ -1537,9 +1533,14 @@
   }
 
   public static final class Statistics {
+    private final String scheme;
     private AtomicLong bytesRead = new AtomicLong();
     private AtomicLong bytesWritten = new AtomicLong();
 
+    public Statistics(String scheme) {
+      this.scheme = scheme;
+    }
+
     /**
      * Increment the bytes read in the statistics
      * @param newBytes the additional bytes read
@@ -1576,32 +1577,65 @@
       return bytesRead + " bytes read and " + bytesWritten +
              " bytes written";
     }
+
+    /**
+     * Reset the counts of bytes to 0.
+     */
+    public void reset() {
+      bytesWritten.set(0);
+      bytesRead.set(0);
+    }
+
+    /**
+     * Get the uri scheme associated with this statistics object.
+     * @return the schema associated with this set of statistics
+     */
+    public String getScheme() {
+      return scheme;
+    }
   }
 
   /**
    * Get the Map of Statistics object indexed by URI Scheme.
    * @return a Map having a key as URI scheme and value as Statistics object
+   * @deprecated use {@link #getAllStatistics} instead
    */
   public static synchronized Map<String, Statistics> getStatistics() {
-    return statsByUriScheme;
+    Map<String, Statistics> result = new HashMap<String, Statistics>();
+    for(Statistics stat: statisticsTable.values()) {
+      result.put(stat.getScheme(), stat);
+    }
+    return result;
+  }
+
+  /**
+   * Return the FileSystem classes that have Statistics
+   */
+  public static synchronized List<Statistics> getAllStatistics() {
+    return new ArrayList<Statistics>(statisticsTable.values());
   }
 
   /**
    * Get the statistics for a particular file system
-   * @deprecated Consider using {@link #getStatistics()} instead.
    * @param cls the class to lookup
    * @return a statistics object
    */
   public static synchronized
-  Statistics getStatistics(Class<? extends FileSystem> cls) {
+  Statistics getStatistics(String scheme, Class<? extends FileSystem> cls) {
    Statistics result = statisticsTable.get(cls);
    if (result == null) {
-      result = new Statistics();
+      result = new Statistics(scheme);
      statisticsTable.put(cls, result);
    }
    return result;
  }
 
+  public static synchronized void clearStatistics() {
+    for(Statistics stat: statisticsTable.values()) {
+      stat.reset();
+    }
+  }
+
   public static synchronized
   void printStatistics() throws IOException {
     for (Map.Entry<Class<? extends FileSystem>, Statistics> pair:
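Taken together, the FileSystem.java hunks replace the scheme-keyed statsByUriScheme map with a single per-class table whose entries know their own scheme and can be reset. A compilable sketch of the same registry pattern in isolation (StatsRegistry and Stats are illustrative stand-ins, not Hadoop's classes):

    import java.util.ArrayList;
    import java.util.IdentityHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative registry mirroring the patched FileSystem statistics table.
    public class StatsRegistry {
      public static final class Stats {
        private final String scheme;
        private final AtomicLong bytesRead = new AtomicLong();

        Stats(String scheme) { this.scheme = scheme; }
        public String getScheme() { return scheme; }
        public void incrementBytesRead(long n) { bytesRead.addAndGet(n); }
        public void reset() { bytesRead.set(0); }
      }

      // One Stats object per registered class, as in the patch.
      private static final Map<Class<?>, Stats> table =
        new IdentityHashMap<Class<?>, Stats>();

      // Lazily create the entry the first time a class asks for it.
      public static synchronized Stats getStatistics(String scheme,
                                                     Class<?> cls) {
        Stats result = table.get(cls);
        if (result == null) {
          result = new Stats(scheme);
          table.put(cls, result);
        }
        return result;
      }

      public static synchronized List<Stats> getAllStatistics() {
        return new ArrayList<Stats>(table.values());
      }

      // The new hook used between tasks under JVM reuse.
      public static synchronized void clearStatistics() {
        for (Stats s : table.values()) {
          s.reset();
        }
      }
    }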
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java Wed Apr 8 05:16:47 2009
@@ -59,7 +59,8 @@
 
   public URI getUri() { return NAME; }
 
-  public void initialize(URI uri, Configuration conf) {
+  public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     setConf(conf);
   }
 
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java Wed Apr 8 05:16:47 2009
@@ -56,6 +56,7 @@
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException { // get
+    super.initialize(uri, conf);
     // get host information from uri (overrides info in conf)
     String host = uri.getHost();
     host = (host == null) ? conf.get("fs.ftp.host", null) : host;
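From here on, every concrete FileSystem follows the same idiom: call super.initialize() first so the base class can bind the statistics field before any subclass setup runs. A minimal sketch of the pattern, with AbstractFs and MyFs as illustrative stand-ins for FileSystem and a concrete subclass:

    import java.io.IOException;
    import java.net.URI;

    // Illustrative stand-ins, not the Hadoop classes.
    abstract class AbstractFs {
      protected String statisticsScheme;  // stands in for the statistics field

      // No longer abstract: the base class claims the per-scheme statistics.
      public void initialize(URI name) throws IOException {
        statisticsScheme = name.getScheme();
      }
    }

    class MyFs extends AbstractFs {
      @Override
      public void initialize(URI name) throws IOException {
        super.initialize(name);  // must run first, as in each hunk below
        // ... subclass setup can now rely on statistics being set
      }
    }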
conf.get("fs.ftp.host", null) : host; Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java?rev=763107&r1=763106&r2=763107&view=diff ============================================================================== --- hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java (original) +++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java Wed Apr 8 05:16:47 2009 @@ -62,27 +62,29 @@ @Override public void initialize(URI uri, Configuration conf) throws IOException { - try { - if (kfsImpl == null) { - if (uri.getHost() == null) { - kfsImpl = new KFSImpl(conf.get("fs.kfs.metaServerHost", ""), - conf.getInt("fs.kfs.metaServerPort", -1), - statistics); - } else { - kfsImpl = new KFSImpl(uri.getHost(), uri.getPort(), statistics); - } - } - - this.localFs = FileSystem.getLocal(conf); - this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority()); - this.workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this); - setConf(conf); - - } catch (Exception e) { - e.printStackTrace(); - System.out.println("Unable to initialize KFS"); - System.exit(-1); + super.initialize(uri, conf); + try { + if (kfsImpl == null) { + if (uri.getHost() == null) { + kfsImpl = new KFSImpl(conf.get("fs.kfs.metaServerHost", ""), + conf.getInt("fs.kfs.metaServerPort", -1), + statistics); + } else { + kfsImpl = new KFSImpl(uri.getHost(), uri.getPort(), statistics); + } } + + this.localFs = FileSystem.getLocal(conf); + this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority()); + this.workingDir = new Path("/user", System.getProperty("user.name") + ).makeQualified(this); + setConf(conf); + + } catch (Exception e) { + e.printStackTrace(); + System.out.println("Unable to initialize KFS"); + System.exit(-1); + } } @Override Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=763107&r1=763106&r2=763107&view=diff ============================================================================== --- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java (original) +++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java Wed Apr 8 05:16:47 2009 @@ -70,6 +70,7 @@ @Override public void initialize(URI uri, Configuration conf) throws IOException { + super.initialize(uri, conf); if (store == null) { store = createDefaultStore(conf); } Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=763107&r1=763106&r2=763107&view=diff ============================================================================== --- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original) +++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Wed Apr 8 05:16:47 2009 @@ -209,6 +209,7 @@ @Override public void initialize(URI uri, Configuration conf) throws IOException { + super.initialize(uri, conf); if (store == null) { store = createDefaultStore(conf); } Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java?rev=763107&r1=763106&r2=763107&view=diff 
Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java (original)
+++ hadoop/core/trunk/src/examples/org/apache/hadoop/examples/Sort.java Wed Apr 8 05:16:47 2009
@@ -51,6 +51,7 @@
  *            in-dir out-dir
  */
 public class Sort<K,V> extends Configured implements Tool {
+  private RunningJob jobResult = null;
 
   static int printUsage() {
     System.out.println("sort [-m <maps>] [-r <reduces>] " +
@@ -172,7 +173,7 @@
          " with " + num_reduces + " reduces.");
     Date startTime = new Date();
     System.out.println("Job started: " + startTime);
-    JobClient.runJob(jobConf);
+    jobResult = JobClient.runJob(jobConf);
     Date end_time = new Date();
     System.out.println("Job ended: " + end_time);
     System.out.println("The job took " +
@@ -187,4 +188,11 @@
     System.exit(res);
   }
 
+  /**
+   * Get the last job that was run using this instance.
+   * @return the results of the last job that was run
+   */
+  public RunningJob getResult() {
+    return jobResult;
+  }
 }

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Wed Apr 8 05:16:47 2009
@@ -80,7 +80,7 @@
   private int socketTimeout;
   private int datanodeWriteTimeout;
   final int writePacketSize;
-  private FileSystem.Statistics stats;
+  private final FileSystem.Statistics stats;
   private int maxBlockAcquireFailures;
 
@@ -145,7 +145,7 @@
    * Create a new DFSClient connected to the default namenode.
    */
   public DFSClient(Configuration conf) throws IOException {
-    this(NameNode.getAddress(conf), conf);
+    this(NameNode.getAddress(conf), conf, null);
   }
 
@@ -188,8 +188,7 @@
   public DFSClient(InetSocketAddress nameNodeAddr, Configuration conf)
       throws IOException {
-    this(nameNodeAddr, conf,
-         FileSystem.getStatistics(DistributedFileSystem.class));
+    this(nameNodeAddr, conf, null);
   }
 
   private void checkOpen() throws IOException {
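The Sort change exists purely so the new test assertion further down can inspect the finished job's counters. The general pattern, sketched against the old mapred API (the field and accessor names follow the patch; the holder class and run() signature are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class JobResultHolder {
      private RunningJob jobResult = null;

      // Run the job and keep the handle instead of discarding it.
      public int run(JobConf jobConf) throws IOException {
        jobResult = JobClient.runJob(jobConf);  // blocks until completion
        return 0;
      }

      // Callers (e.g. a test) can now read counters from the finished job.
      public RunningJob getResult() {
        return jobResult;
      }
    }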
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java Wed Apr 8 05:16:47 2009
@@ -24,7 +24,6 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -35,7 +34,7 @@
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.util.*;
+import org.apache.hadoop.util.Progressable;
 
 /****************************************************************
@@ -70,6 +69,7 @@
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     setConf(conf);
 
     String host = uri.getHost();

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Wed Apr 8 05:16:47 2009
@@ -81,6 +81,7 @@
 
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+    super.initialize(name, conf);
     setConf(conf);
     try {
       this.ugi = UnixUserGroupInformation.login(conf, true);

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java Wed Apr 8 05:16:47 2009
@@ -130,7 +130,9 @@
         task = myTask.getTask();
         taskid = task.getTaskID();
         isCleanup = task.isTaskCleanupTask();
-
+        // reset the statistics for the task
+        FileSystem.clearStatistics();
+
         //create the index file so that the log files
         //are viewable immediately
         TaskLog.syncLogs(firstTaskid, taskid, isCleanup);

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java Wed Apr 8 05:16:47 2009
@@ -37,6 +37,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
@@ -634,15 +635,14 @@
      new HashMap<String, FileSystemStatisticUpdater>();
 
   private synchronized void updateCounters() {
-    for(Map.Entry<String, FileSystem.Statistics> entry :
-      FileSystem.getStatistics().entrySet()) {
-      String uriScheme = entry.getKey();
+    for(Statistics stat: FileSystem.getAllStatistics()) {
+      String uriScheme = stat.getScheme();
       FileSystemStatisticUpdater updater = statisticUpdaters.get(uriScheme);
       if(updater==null) {//new FileSystem has been found in the cache
-        updater = new FileSystemStatisticUpdater(uriScheme, entry.getValue());
+        updater = new FileSystemStatisticUpdater(uriScheme, stat);
         statisticUpdaters.put(uriScheme, updater);
       }
-      updater.updateCounters();
+      updater.updateCounters();
     }
   }
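Task.updateCounters() now iterates the statistics objects directly and lazily creates one updater per URI scheme. The shape of that loop in a standalone, compilable form (Stat and Updater are illustrative stand-ins for FileSystem.Statistics and FileSystemStatisticUpdater):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class UpdaterSketch {
      // Stand-ins for Hadoop's statistics and updater types.
      static class Stat {
        private final String scheme;
        Stat(String scheme) { this.scheme = scheme; }
        String getScheme() { return scheme; }
      }
      static class Updater {
        private final Stat stat;
        Updater(String scheme, Stat stat) { this.stat = stat; }
        void updateCounters() { /* push byte counts into job counters */ }
      }

      private final Map<String, Updater> updaters =
        new HashMap<String, Updater>();

      // One updater per scheme, created the first time the scheme appears.
      void updateCounters(List<Stat> allStats) {
        for (Stat stat : allStats) {
          Updater u = updaters.get(stat.getScheme());
          if (u == null) {
            u = new Updater(stat.getScheme(), stat);
            updaters.put(stat.getScheme(), u);
          }
          u.updateCounters();
        }
      }

      public static void main(String[] args) {
        new UpdaterSketch().updateCounters(
            Arrays.asList(new Stat("hdfs"), new Stat("file")));
      }
    }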
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Wed Apr 8 05:16:47 2009
@@ -407,9 +407,7 @@
 
   public void testClose() throws IOException {
     Configuration conf = new Configuration();
-    LocalFileSystem fs = new LocalFileSystem();
-    fs.setConf(conf);
-    fs.getRawFileSystem().setConf(conf);
+    LocalFileSystem fs = FileSystem.getLocal(conf);
 
     // create a sequence file 1
     Path path1 = new Path(System.getProperty("test.build.data",".")+"/test1.seq");

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRDFSSort.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRDFSSort.java?rev=763107&r1=763106&r2=763107&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRDFSSort.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRDFSSort.java Wed Apr 8 05:16:47 2009
@@ -94,7 +94,17 @@
     String[] sortArgs = {sortInput.toString(), sortOutput.toString()};
 
     // Run Sort
-    assertEquals(ToolRunner.run(job, new Sort(), sortArgs), 0);
+    Sort sort = new Sort();
+    assertEquals(ToolRunner.run(job, sort, sortArgs), 0);
+    Counters counters = sort.getResult().getCounters();
+    long mapInput = counters.findCounter(Task.Counter.MAP_INPUT_BYTES
+                                         ).getValue();
+    long hdfsRead = counters.findCounter(Task.FILESYSTEM_COUNTER_GROUP,
+                                         "HDFS_BYTES_READ").getValue();
+    // the hdfs read should be between 100% and 110% of the map input bytes
+    assertTrue("map input = " + mapInput + ", hdfs read = " + hdfsRead,
+               (hdfsRead < (mapInput * 1.1)) &&
+               (hdfsRead > mapInput));
   }
 
   private static void runSortValidator(JobConf job,
@@ -139,8 +149,6 @@
     job.setNumReduceTasks(0);
     RunningJob result = JobClient.runJob(job);
     long uses = result.getCounters().findCounter("jvm", "use").getValue();
-    System.out.println("maps = " + job.getNumMapTasks());
-    System.out.println(result.getCounters());
     int maps = job.getNumMapTasks();
     if (reuse) {
       assertTrue("maps = " + maps + ", uses = " + uses, maps < uses);
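A quick check of the new assertion's arithmetic: with mapInput of, say, 1,000,000 bytes, the test accepts any hdfsRead strictly between 1,000,000 and 1,100,000 bytes -- the map must read every input byte, plus at most 10% overhead. Statistics leaked from a previous task in a reused JVM would inflate hdfsRead well past that ceiling, which is exactly what this test guards against. The bound in isolation (class and method names are illustrative):

    public class BoundCheck {
      // True when hdfsRead is within (100%, 110%) of mapInput, mirroring
      // the assertion added to TestMiniMRDFSSort.
      static boolean withinBound(long mapInput, long hdfsRead) {
        return hdfsRead > mapInput && hdfsRead < mapInput * 1.1;
      }

      public static void main(String[] args) {
        System.out.println(withinBound(1000000L, 1050000L)); // true
        System.out.println(withinBound(1000000L, 2000000L)); // false:
            // doubled bytes, as leaked statistics would produce
      }
    }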