hadoop-mapreduce-commits mailing list archives

From d...@apache.org
Subject svn commit: r794609 - in /hadoop/mapreduce/trunk: CHANGES.txt src/java/mapred-default.xml src/java/org/apache/hadoop/mapred/ReduceTask.java
Date Thu, 16 Jul 2009 09:45:18 GMT
Author: ddas
Date: Thu Jul 16 09:45:17 2009
New Revision: 794609

URL: http://svn.apache.org/viewvc?rev=794609&view=rev
Log:
MAPREDUCE-353. Makes the shuffle read and connection timeouts configurable. Contributed by Ravi Gummadi.

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/java/mapred-default.xml
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
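
The patch introduces two configuration keys, mapred.shuffle.connect.timeout (default 180000 ms) and mapred.shuffle.read.timeout (default 30000 ms); both appear in the mapred-default.xml diff below and are read via job.getInt in the ReduceTask.java diff. As a minimal illustration only (the class name and the 60-second values are hypothetical, not part of this commit), the keys can be set and read through the same JobConf/Configuration API the task uses:

    import org.apache.hadoop.mapred.JobConf;

    public class ShuffleTimeoutExample {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Hypothetical overrides; the committed defaults are 180000 ms (connect) and 30000 ms (read).
        conf.setInt("mapred.shuffle.connect.timeout", 60 * 1000); // max time to connect to a tasktracker
        conf.setInt("mapred.shuffle.read.timeout", 60 * 1000);    // max wait for map output once connected
        System.out.println("shuffle connect timeout = "
            + conf.getInt("mapred.shuffle.connect.timeout", 180 * 1000) + " ms");
        System.out.println("shuffle read timeout = "
            + conf.getInt("mapred.shuffle.read.timeout", 30 * 1000) + " ms");
      }
    }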

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=794609&r1=794608&r2=794609&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Thu Jul 16 09:45:17 2009
@@ -111,6 +111,9 @@
     MAPREDUCE-626. Improves the execution time of TestLostTracker.
     (Jothi Padmanabhan via ddas)
 
+    MAPREDUCE-353. Makes the shuffle read and connection timeouts
+    configurable. (Ravi Gummadi via ddas)
+
   BUG FIXES
     MAPREDUCE-703. Sqoop requires dependency on hsqldb in ivy.
     (Aaron Kimball via matei)

Modified: hadoop/mapreduce/trunk/src/java/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/mapred-default.xml?rev=794609&r1=794608&r2=794609&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/mapred-default.xml (original)
+++ hadoop/mapreduce/trunk/src/java/mapred-default.xml Thu Jul 16 09:45:17 2009
@@ -359,6 +359,24 @@
 </property>
 
 <property>
+  <name>mapred.shuffle.connect.timeout</name>
+  <value>180000</value>
+  <description>Expert: Cluster-wide configuration. The maximum amount of
+  time (in milli seconds) reduce task spends in trying to connect to a
+  tasktracker for getting map output.
+  </description>
+</property>
+
+<property>
+  <name>mapred.shuffle.read.timeout</name>
+  <value>30000</value>
+  <description>Expert: Cluster-wide configuration. The maximum amount of time
+  (in milli seconds) reduce task waits for map output data to be available
+  for reading after obtaining connection.
+  </description>
+</property>
+
+<property>
   <name>mapred.task.timeout</name>
   <value>600000</value>
   <description>The number of milliseconds before a task will be

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?rev=794609&r1=794608&r2=794609&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Thu Jul 16 09:45:17 2009
@@ -1140,6 +1140,8 @@
       private final static int UNIT_CONNECT_TIMEOUT = 30 * 1000;
       // default read timeout (in milliseconds)
       private final static int DEFAULT_READ_TIMEOUT = 3 * 60 * 1000;
+      private final int shuffleConnectionTimeout;
+      private final int shuffleReadTimeout;
 
       private MapOutputLocation currentLocation = null;
       private int id = nextMapOutputCopierId++;
@@ -1155,6 +1157,11 @@
         LOG.debug(getName() + " created");
         this.reporter = reporter;
         
+        shuffleConnectionTimeout =
+          job.getInt("mapred.shuffle.connect.timeout", STALLED_COPY_TIMEOUT);
+        shuffleReadTimeout =
+          job.getInt("mapred.shuffle.read.timeout", DEFAULT_READ_TIMEOUT);
+        
         if (job.getCompressMapOutput()) {
           Class<? extends CompressionCodec> codecClass =
             job.getMapOutputCompressorClass(DefaultCodec.class);
@@ -1373,8 +1380,8 @@
         // Connect
         URLConnection connection = 
           mapOutputLoc.getOutputLocation().openConnection();
-        InputStream input = getInputStream(connection, STALLED_COPY_TIMEOUT,
-                                           DEFAULT_READ_TIMEOUT); 
+        InputStream input = getInputStream(connection, shuffleConnectionTimeout,
+                                           shuffleReadTimeout); 
 
         //We will put a file in memory if it meets certain criteria:
         //1. The size of the (decompressed) file should be less than 25% of 
@@ -1477,8 +1484,8 @@
           // Reconnect
           try {
             connection = mapOutputLoc.getOutputLocation().openConnection();
-            input = getInputStream(connection, STALLED_COPY_TIMEOUT, 
-                                   DEFAULT_READ_TIMEOUT);
+            input = getInputStream(connection, shuffleConnectionTimeout, 
+                                   shuffleReadTimeout);
           } catch (IOException ioe) {
             LOG.info("Failed reopen connection to fetch map-output from " + 
                      mapOutputLoc.getHost());
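
The getInputStream(URLConnection, connectTimeout, readTimeout) helper called above is not part of this diff. Purely as a sketch of what the two parameters control (this is not the actual Hadoop implementation; the class is a stand-in and assumes positive timeout values), such a helper could spend the connect budget in UNIT_CONNECT_TIMEOUT-sized attempts and set the read timeout directly on the URLConnection:

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.URLConnection;

    public class ShuffleFetchSketch {
      // Same value as the UNIT_CONNECT_TIMEOUT constant in the diff above: 30 s per connect attempt.
      private static final int UNIT_CONNECT_TIMEOUT = 30 * 1000;

      // Illustrative stand-in for the helper used in ReduceTask; not the real implementation.
      static InputStream getInputStream(URLConnection connection,
                                        int connectTimeout,
                                        int readTimeout) throws IOException {
        connection.setReadTimeout(readTimeout);              // how long a read may block for map output
        int unit = Math.min(UNIT_CONNECT_TIMEOUT, connectTimeout);
        connection.setConnectTimeout(unit);
        int remaining = connectTimeout;
        while (true) {
          try {
            connection.connect();                            // one connect attempt of at most 'unit' ms
            break;
          } catch (IOException ioe) {
            remaining -= unit;                               // charge this attempt against the budget
            if (remaining <= 0) {
              throw ioe;                                     // connect budget exhausted, give up
            }
            if (remaining < unit) {
              unit = remaining;                              // shrink the final attempt
              connection.setConnectTimeout(unit);
            }
          }
        }
        return connection.getInputStream();                  // stream the copier reads map output from
      }
    }

With the committed defaults, this sketch would allow roughly six 30-second connect attempts (180000 ms in total) and then a 30000 ms read timeout once the connection is established.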


