hadoop-common-commits mailing list archives

From: acmur...@apache.org
Subject: svn commit: r647060 - in /hadoop/core/branches/branch-0.17: ./ src/examples/org/apache/hadoop/examples/ src/test/org/apache/hadoop/mapred/
Date: Fri, 11 Apr 2008 06:03:47 GMT
Author: acmurthy
Date: Thu Apr 10 23:03:45 2008
New Revision: 647060

URL: http://svn.apache.org/viewvc?rev=647060&view=rev
Log:
Merge -r 647057:647058 from trunk to branch-0.17 to fix HADOOP-3174

Added:
    hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
      - copied unchanged from r647058, hadoop/core/trunk/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
Modified:
    hadoop/core/branches/branch-0.17/CHANGES.txt
    hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/ExampleDriver.java
    hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/TestMultiFileInputFormat.java

Modified: hadoop/core/branches/branch-0.17/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/CHANGES.txt?rev=647060&r1=647059&r2=647060&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.17/CHANGES.txt Thu Apr 10 23:03:45 2008
@@ -216,6 +216,9 @@
     as Hudson generates false negatives under the current load.
     (Nigel Daley via cdouglas)
 
+    HADOOP-3174. Illustrative example for MultipleFileInputFormat. (Enis
+    Soztutar via acmurthy)  
+
   OPTIMIZATIONS
 
     HADOOP-2790.  Fixed inefficient method hasSpeculativeTask by removing

Modified: hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/ExampleDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/ExampleDriver.java?rev=647060&r1=647059&r2=647060&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/ExampleDriver.java (original)
+++ hadoop/core/branches/branch-0.17/src/examples/org/apache/hadoop/examples/ExampleDriver.java Thu Apr 10 23:03:45 2008
@@ -49,6 +49,7 @@
       pgd.addClass("sudoku", Sudoku.class, "A sudoku solver.");
       pgd.addClass("sleep", SleepJob.class, "A job that sleeps at each map and reduce task.");
       pgd.addClass("join", Join.class, "A job that effects a join over sorted, equally partitioned datasets");
+      pgd.addClass("multifilewc", MultiFileWordCount.class, "A job that counts words from several files.");
       pgd.driver(argv);
     }
     catch(Throwable e){

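For context, ExampleDriver uses org.apache.hadoop.util.ProgramDriver to map a short command name to the class whose main() runs the job, which is how the new "multifilewc" example becomes launchable by name from the examples jar. Below is a minimal, hypothetical stand-alone sketch of that registration pattern, not the committed ExampleDriver.java; the class name MyExampleDriver and the jar name in the comment are assumptions.

import org.apache.hadoop.examples.MultiFileWordCount;
import org.apache.hadoop.util.ProgramDriver;

// Hypothetical driver illustrating the registration shown in the diff above.
// Once registered, the job can be run by name, e.g. (jar name assumed):
//   bin/hadoop jar hadoop-*-examples.jar multifilewc <input dir> <output dir>
public class MyExampleDriver {
  public static void main(String[] argv) {
    ProgramDriver pgd = new ProgramDriver();
    try {
      // Map the short name "multifilewc" to the example's main class.
      pgd.addClass("multifilewc", MultiFileWordCount.class,
                   "A job that counts words from several files.");
      pgd.driver(argv);  // dispatches to the program named in argv[0]
    }
    catch (Throwable e) {
      e.printStackTrace();
    }
  }
}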
Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/TestMultiFileInputFormat.java?rev=647060&r1=647059&r2=647060&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/TestMultiFileInputFormat.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/TestMultiFileInputFormat.java Thu Apr 10 23:03:45 2008
@@ -22,13 +22,14 @@
 import java.util.HashMap;
 import java.util.Random;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-
-import junit.framework.TestCase;
+import org.apache.hadoop.io.Text;
 
 public class TestMultiFileInputFormat extends TestCase{
 
@@ -46,9 +47,9 @@
   private HashMap<String, Long> lengths = new HashMap<String, Long>();
   
   /** Dummy class to extend MultiFileInputFormat*/
-  private class DummyMultiFileInputFormat extends MultiFileInputFormat {
+  private class DummyMultiFileInputFormat extends MultiFileInputFormat<Text, Text> {
     @Override
-    public RecordReader getRecordReader(InputSplit split, JobConf job
+    public RecordReader<Text,Text> getRecordReader(InputSplit split, JobConf job
         , Reporter reporter) throws IOException {
       return null;
     }
@@ -89,7 +90,7 @@
       LOG.info("Number of files increment = " + NUM_FILES_INCR);
     }
     
-    MultiFileInputFormat format = new DummyMultiFileInputFormat();
+    MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);
     
     for(int numFiles = 1; numFiles< MAX_NUM_FILES ; 
@@ -106,7 +107,7 @@
         for(MultiFileSplit split : splits) {
           long splitLength = 0;
           for(Path p : split.getPaths()) {
-            long length = fs.getContentLength(p);
+            long length = fs.getContentSummary(p).getLength();
             assertEquals(length, lengths.get(p.getName()).longValue());
             splitLength += length;
             String name = p.getName();
@@ -125,7 +126,7 @@
   }
   
   public void testFormatWithLessPathsThanSplits() throws Exception {
-    MultiFileInputFormat format = new DummyMultiFileInputFormat();
+    MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);     
     
     // Test with no path

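The main change in the test above is that MultiFileInputFormat is now used with explicit type parameters rather than as a raw type. A minimal sketch of that subclassing pattern, mirroring the test's DummyMultiFileInputFormat (the class name TypedMultiFileInputFormat is an assumption; a real format would return a working reader instead of null):

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MultiFileInputFormat;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

// The <Text, Text> parameters fix the key/value types, so the returned reader
// must be a RecordReader<Text, Text> rather than a raw RecordReader.
public class TypedMultiFileInputFormat extends MultiFileInputFormat<Text, Text> {
  @Override
  public RecordReader<Text, Text> getRecordReader(InputSplit split, JobConf job,
      Reporter reporter) throws IOException {
    return null;  // placeholder reader, as in the test's dummy implementation
  }
}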

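The test also switches from fs.getContentLength(p) to fs.getContentSummary(p).getLength() to obtain a file's size in bytes. A small stand-alone sketch of that call, assuming a local file system and an existing path (the class name ContentLengthSketch is an assumption):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ContentLengthSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);   // local FS, as in the test
    Path p = new Path(args[0]);                  // path to an existing file
    // ContentSummary.getLength() reports the total length in bytes under p,
    // which is what the test compares against its recorded file lengths.
    long length = fs.getContentSummary(p).getLength();
    System.out.println(p + ": " + length + " bytes");
  }
}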
