hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r551818 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/mapred/JobClient.java src/java/org/apache/hadoop/mapred/JobInProgress.java src/test/org/apache/hadoop/mapred/TestMapOutputOrder.java
Date Fri, 29 Jun 2007 07:13:15 GMT
Author: omalley
Date: Fri Jun 29 00:13:15 2007
New Revision: 551818

URL: http://svn.apache.org/viewvc?view=rev&rev=551818
Log:
Revert HADOOP-1440, because it caused multiple instances of tasks to be 
launched incorrectly.

Removed:
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMapOutputOrder.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=551818&r1=551817&r2=551818
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri Jun 29 00:13:15 2007
@@ -209,9 +209,7 @@
      thread.  Reporting during sorting and more is also more
      consistent.  (Vivek Ratan via cutting)
 
- 64. HADOOP-1440.  When reduce is disabled, use order of splits
-     returned by InputFormat#getSplits when numbering outputs.
-     (Senthil Subramanian via cutting)
+ 64. [ intentionally blank ]
 
  65. HADOOP-1453.  Remove some unneeded calls to FileSystem#exists()
      when opening files, reducing the namenode load somewhat.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?view=diff&rev=551818&r1=551817&r2=551818
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Fri Jun 29 00:13:15 2007
@@ -338,10 +338,6 @@
     LOG.debug("Creating splits at " + fs.makeQualified(submitSplitFile));
     InputSplit[] splits = 
       job.getInputFormat().getSplits(job, job.getNumMapTasks());
-    Hashtable<InputSplit, Integer> splitPositions = new Hashtable<InputSplit, Integer>();

-    for (int i = 0; i < splits.length; ++i) {
-      splitPositions.put(splits[i], i);
-    }
     // sort the splits into order based on size, so that the biggest
     // go first
     Arrays.sort(splits, new Comparator<InputSplit>() {
@@ -365,7 +361,7 @@
     // write the splits to a file for the job tracker
     FSDataOutputStream out = fs.create(submitSplitFile);
     try {
-      writeSplitsFile(splits, splitPositions, out);
+      writeSplitsFile(splits, out);
     } finally {
       out.close();
     }
@@ -394,7 +390,6 @@
   static class RawSplit implements Writable {
     private String splitClass;
     private BytesWritable bytes = new BytesWritable();
-    private int position;
     private String[] locations;
       
     public void setBytes(byte[] data, int offset, int length) {
@@ -412,19 +407,11 @@
     public BytesWritable getBytes() {
       return bytes;
     }
-
-    public void setPosition(int position) {
-      this.position = position;
-    }
       
     public void setLocations(String[] locations) {
       this.locations = locations;
     }
       
-    public int getPosition() {
-      return position;
-    }
-      
     public String[] getLocations() {
       return locations;
     }
@@ -432,7 +419,6 @@
     public void readFields(DataInput in) throws IOException {
       splitClass = Text.readString(in);
       bytes.readFields(in);
-      position = WritableUtils.readVInt(in);
       int len = WritableUtils.readVInt(in);
       locations = new String[len];
       for(int i=0; i < len; ++i) {
@@ -443,7 +429,6 @@
     public void write(DataOutput out) throws IOException {
       Text.writeString(out, splitClass);
       bytes.write(out);
-      WritableUtils.writeVInt(out, position);
       WritableUtils.writeVInt(out, locations.length);
       for(int i = 0; i < locations.length; i++) {
         Text.writeString(out, locations[i]);
@@ -463,8 +448,7 @@
    * @param splits the input splits to write out
    * @param out the stream to write to
    */
-  private void writeSplitsFile(InputSplit[] splits, Hashtable splitPositions,
-                              FSDataOutputStream out) throws IOException {
+  private void writeSplitsFile(InputSplit[] splits, FSDataOutputStream out) throws IOException {
     out.write(SPLIT_FILE_HEADER);
     WritableUtils.writeVInt(out, CURRENT_SPLIT_FILE_VERSION);
     WritableUtils.writeVInt(out, splits.length);
@@ -475,7 +459,6 @@
       buffer.reset();
       split.write(buffer);
       rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
-      rawSplit.setPosition(((Integer) splitPositions.get(split)).intValue());
       rawSplit.setLocations(split.getLocations());
       rawSplit.write(out);
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?view=diff&rev=551818&r1=551817&r2=551818
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Fri Jun 29 00:13:15 2007
@@ -222,7 +222,7 @@
       maps[i] = new TaskInProgress(uniqueString, jobFile, 
                                    splits[i].getClassName(),
                                    splits[i].getBytes(), 
-                                   jobtracker, conf, this, splits[i].getPosition());
+                                   jobtracker, conf, this, i);
       for(String host: splits[i].getLocations()) {
         List<TaskInProgress> hostMaps = hostToMaps.get(host);
         if (hostMaps == null) {



Mime
View raw message