hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From acmur...@apache.org
Subject svn commit: r648765 [2/2] - in /hadoop/core/branches/branch-0.17: ./ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/ src/contrib/index/src/java/org/apache/hadoop/cont...
Date Wed, 16 Apr 2008 17:35:45 GMT
Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/TestPipes.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/TestPipes.java?rev=648765&r1=648764&r2=648765&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/TestPipes.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/TestPipes.java Wed Apr 16 10:35:39 2008
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
@@ -146,7 +147,7 @@
       Submitter.setExecutable(job, fs.makeQualified(wordExec).toString());
       Submitter.setIsJavaRecordReader(job, true);
       Submitter.setIsJavaRecordWriter(job, true);
-      job.setInputPath(inputPath);
+      FileInputFormat.setInputPaths(job, inputPath);
       FileOutputFormat.setOutputPath(job, outputPath);
       RunningJob result = Submitter.submitJob(job);
       assertTrue("pipes job failed", result.isSuccessful());

Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java?rev=648765&r1=648764&r2=648765&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java Wed Apr 16 10:35:39 2008
@@ -31,7 +31,7 @@
  * RecordReaders are not implemented in Java, naturally...
  */
 public class WordCountInputFormat
-  implements InputFormat<IntWritable, Text> {
+  extends FileInputFormat<IntWritable, Text> {
   
   static class WordCountInputSplit implements InputSplit  {
     private String filename;
@@ -53,7 +53,7 @@
                                 int numSplits) throws IOException {
     ArrayList<InputSplit> result = new ArrayList<InputSplit>();
     FileSystem local = FileSystem.getLocal(conf);
-    for(Path dir: conf.getInputPaths()) {
+    for(Path dir: getInputPaths(conf)) {
       for(FileStatus file: local.listStatus(dir)) {
         result.add(new WordCountInputSplit(file.getPath()));
       }

Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordMR.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordMR.java?rev=648765&r1=648764&r2=648765&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordMR.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordMR.java Wed Apr 16 10:35:39 2008
@@ -308,7 +308,7 @@
 
 
     JobConf genJob = new JobConf(conf, TestRecordMR.class);
-    genJob.setInputPath(randomIns);
+    FileInputFormat.setInputPaths(genJob, randomIns);
     genJob.setInputFormat(SequenceFileInputFormat.class);
     genJob.setMapperClass(RandomGenMapper.class);
 
@@ -353,7 +353,7 @@
     Path intermediateOuts = new Path(testdir, "intermediateouts");
     fs.delete(intermediateOuts, true);
     JobConf checkJob = new JobConf(conf, TestRecordMR.class);
-    checkJob.setInputPath(randomOuts);
+    FileInputFormat.setInputPaths(checkJob, randomOuts);
     checkJob.setInputFormat(SequenceFileInputFormat.class);
     checkJob.setMapperClass(RandomCheckMapper.class);
 
@@ -376,7 +376,7 @@
     Path finalOuts = new Path(testdir, "finalouts");        
     fs.delete(finalOuts, true);
     JobConf mergeJob = new JobConf(conf, TestRecordMR.class);
-    mergeJob.setInputPath(intermediateOuts);
+    FileInputFormat.setInputPaths(mergeJob, intermediateOuts);
     mergeJob.setInputFormat(SequenceFileInputFormat.class);
     mergeJob.setMapperClass(MergeMapper.class);
         

Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordWritable.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordWritable.java?rev=648765&r1=648764&r2=648765&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordWritable.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/record/TestRecordWritable.java Wed Apr 16 10:35:39 2008
@@ -52,7 +52,7 @@
 
     fs.delete(dir, true);
 
-    job.setInputPath(dir);
+    FileInputFormat.setInputPaths(job, dir);
 
     // for a variety of lengths
     for (int length = 0; length < MAX_LENGTH;

Modified: hadoop/core/branches/branch-0.17/src/test/testshell/ExternalMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/testshell/ExternalMapReduce.java?rev=648765&r1=648764&r2=648765&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/testshell/ExternalMapReduce.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/testshell/ExternalMapReduce.java Wed Apr 16 10:35:39 2008
@@ -27,6 +27,7 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -100,7 +101,7 @@
     Configuration commandConf = JobClient.getCommandLineConfig();
     JobConf testConf = new JobConf(commandConf, ExternalMapReduce.class);
     testConf.setJobName("external job");
-    testConf.setInputPath(input);
+    FileInputFormat.setInputPaths(testConf, input);
     FileOutputFormat.setOutputPath(testConf, outDir);
     testConf.setMapperClass(ExternalMapReduce.class);
     testConf.setReducerClass(ExternalMapReduce.class);



Mime
View raw message