hadoop-mapreduce-commits mailing list archives

From: tomwh...@apache.org
Subject: svn commit: r894964 - in /hadoop/mapreduce/trunk: CHANGES.txt src/java/org/apache/hadoop/mapreduce/Job.java src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java
Date: Fri, 01 Jan 2010 01:15:30 GMT
Author: tomwhite
Date: Fri Jan  1 01:15:30 2010
New Revision: 894964

URL: http://svn.apache.org/viewvc?rev=894964&view=rev
Log:
MAPREDUCE-1131. Using profilers other than hprof can cause JobClient to report job failure.
Contributed by Aaron Kimball.
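
For context, the failure mode this change addresses: the client previously assumed every profiled task produced hprof-style output and tried to fetch profile.out for it, so configuring any other agent made an otherwise successful job look failed. A rough driver sketch of such a configuration follows; the agent name "someprofiler" and the paths are made up for illustration and are not part of this commit.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class ProfileWithOtherAgent {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "profile-with-other-agent");
        job.setJarByClass(ProfileWithOtherAgent.class);

        // Identity map and reduce; placeholder paths, purely illustrative.
        FileInputFormat.addInputPath(job, new Path("/tmp/profile-demo/in"));
        FileOutputFormat.setOutputPath(job, new Path("/tmp/profile-demo/out"));
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        // Profile the first few map tasks with a non-hprof agent.
        job.setProfileEnabled(true);
        job.setProfileParams("-agentlib:someprofiler=file=%s");
        job.setProfileTaskRange(true, "0-2");

        // Before this patch the client tried to fetch hprof's profile.out
        // for each profiled attempt; a non-hprof agent never writes it, and
        // the failed fetch made the job appear to fail on the client side.
        job.waitForCompletion(true);
      }
    }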

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=894964&r1=894963&r2=894964&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Fri Jan  1 01:15:30 2010
@@ -198,6 +198,9 @@
     MAPREDUCE-1293. AutoInputFormat doesn't work with non-default FileSystems.
     (Andrew Hitchcock via tomwhite)
 
+    MAPREDUCE-1131. Using profilers other than hprof can cause JobClient to
+    report job failure. (Aaron Kimball via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java?rev=894964&r1=894963&r2=894964&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java Fri Jan  1 01:15:30 2010
@@ -1027,12 +1027,52 @@
     return isSuccessful();
   }
 
+  /**
+   * @return true if the profile parameters indicate that this is using
+   * hprof, which generates profile files in a particular location
+   * that we can retrieve to the client.
+   */
+  private boolean shouldDownloadProfile() {
+    // Check the argument string that was used to initialize profiling.
+    // If this indicates hprof and file-based output, then we're ok to
+    // download.
+    String profileParams = getProfileParams();
+
+    if (null == profileParams) {
+      return false;
+    }
+
+    // Split this on whitespace.
+    String [] parts = profileParams.split("[ \\t]+");
+
+    // If any of these indicate hprof, and the use of output files, return true.
+    boolean hprofFound = false;
+    boolean fileFound = false;
+    for (String p : parts) {
+      if (p.startsWith("-agentlib:hprof") || p.startsWith("-Xrunhprof")) {
+        hprofFound = true;
+
+        // This contains a number of comma-delimited components, one of which
+        // may specify the file to write to. Make sure this is present and
+        // not empty.
+        String [] subparts = p.split(",");
+        for (String sub : subparts) {
+          if (sub.startsWith("file=") && sub.length() != "file=".length()) {
+            fileFound = true;
+          }
+        }
+      }
+    }
+
+    return hprofFound && fileFound;
+  }
+
   private void printTaskEvents(TaskCompletionEvent[] events,
       Job.TaskStatusFilter filter, boolean profiling, IntegerRanges mapRanges,
       IntegerRanges reduceRanges) throws IOException, InterruptedException {
     for (TaskCompletionEvent event : events) {
       TaskCompletionEvent.Status status = event.getStatus();
-      if (profiling && 
+      if (profiling && shouldDownloadProfile() &&
          (status == TaskCompletionEvent.Status.SUCCEEDED ||
             status == TaskCompletionEvent.Status.FAILED) &&
             (event.isMapTask() ? mapRanges : reduceRanges).
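
To make the new check above concrete, here is a small, self-contained re-implementation of the same parsing logic together with a few example parameter strings and the result shouldDownloadProfile() would be expected to give for each; it is a demonstration only, not the method from this patch.

    public class ProfileParamCheckDemo {
      // Minimal copy of the hprof detection logic above, for illustration.
      static boolean looksLikeHprofWithFile(String profileParams) {
        if (profileParams == null) {
          return false;
        }
        boolean hprofFound = false;
        boolean fileFound = false;
        for (String p : profileParams.split("[ \\t]+")) {
          if (p.startsWith("-agentlib:hprof") || p.startsWith("-Xrunhprof")) {
            hprofFound = true;
            // Look for a non-empty file= component among the comma-separated
            // hprof options.
            for (String sub : p.split(",")) {
              if (sub.startsWith("file=") && sub.length() > "file=".length()) {
                fileFound = true;
              }
            }
          }
        }
        return hprofFound && fileFound;
      }

      public static void main(String[] args) {
        // hprof with file output: the client fetches the profile.
        System.out.println(looksLikeHprofWithFile(
            "-agentlib:hprof=cpu=samples,heap=sites,file=%s"));   // true
        // hprof without a file= component: nothing to download.
        System.out.println(looksLikeHprofWithFile(
            "-agentlib:hprof=cpu=samples"));                      // false
        // Some other agent: the fetch is now skipped entirely.
        System.out.println(looksLikeHprofWithFile(
            "-agentlib:someprofiler=file=%s"));                   // false
        // No profile parameters at all.
        System.out.println(looksLikeHprofWithFile(null));         // false
      }
    }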

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java?rev=894964&r1=894963&r2=894964&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java Fri Jan  1 01:15:30 2010
@@ -19,6 +19,7 @@
 
 import java.io.BufferedReader;
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
@@ -34,6 +35,8 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import org.junit.Test;
+
 public class TestMRJobClient extends ClusterMapReduceTestCase {
   
   private static final Log LOG = LogFactory.getLog(TestMRJobClient.class);
@@ -61,6 +64,7 @@
     }
   }
 
+  @Test
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
@@ -69,7 +73,8 @@
     testJobList(jobId, conf);
     testChangingJobPriority(jobId, conf);
   }
-  
+
+  @Test
   public void testGetCounter(String jobId,
       Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -81,6 +86,7 @@
     assertEquals("Counter", "3", out.toString().trim());
   }
 
+  @Test
   public void testJobList(String jobId,
       Configuration conf) throws Exception {
     verifyJobPriority(jobId, "HIGH", conf, createJobClient());
@@ -106,7 +112,8 @@
     }
     pis.close();
   }
-  
+
+  @Test
   public void testChangingJobPriority(String jobId, Configuration conf)
       throws Exception {
     int exitCode = runTool(conf, createJobClient(),
@@ -115,7 +122,56 @@
     assertEquals("Exit code", 0, exitCode);
     verifyJobPriority(jobId, "VERY_LOW", conf, createJobClient());
   }
-  
+
+  @Test
+  public void testMissingProfileOutput() throws Exception {
+    Configuration conf = createJobConf();
+    final String input = "hello1\n";
+
+    // Set a job to be profiled with an empty agentlib parameter.
+    // This will fail to create profile.out files for tasks.
+    // This will succeed by skipping the HTTP fetch of the
+    // profiler output.
+    Job job = MapReduceTestUtil.createJob(conf,
+        getInputDir(), getOutputDir(), 1, 1, input);
+    job.setJobName("disable-profile-fetch");
+    job.setProfileEnabled(true);
+    job.setProfileParams("-agentlib:,verbose=n,file=%s");
+    job.setMaxMapAttempts(1);
+    job.setMaxReduceAttempts(1);
+    job.setJobSetupCleanupNeeded(false);
+    job.waitForCompletion(true);
+
+    // Run another job with an hprof agentlib param; verify
+    // that the HTTP fetch works here.
+    Job job2 = MapReduceTestUtil.createJob(conf,
+        getInputDir(), getOutputDir(), 1, 1, input);
+    job2.setJobName("enable-profile-fetch");
+    job2.setProfileEnabled(true);
+    job2.setProfileParams(
+        "-agentlib:hprof=cpu=samples,heap=sites,force=n,"
+        + "thread=y,verbose=n,file=%s");
+    job2.setProfileTaskRange(true, "0-1");
+    job2.setProfileTaskRange(false, "");
+    job2.setMaxMapAttempts(1);
+    job2.setMaxReduceAttempts(1);
+    job2.setJobSetupCleanupNeeded(false);
+    job2.waitForCompletion(true);
+
+    // Find the first map task, verify that we got its profile output file.
+    TaskReport [] reports = job2.getTaskReports(TaskType.MAP);
+    assertTrue("No task reports found!", reports.length > 0);
+    TaskReport report = reports[0];
+    TaskID id = report.getTaskId();
+    assertTrue(TaskType.MAP == id.getTaskType());
+    System.out.println("Using task id: " + id);
+    TaskAttemptID attemptId = new TaskAttemptID(id, 0);
+
+    File profileOutFile = new File(attemptId.toString() + ".profile");
+    assertTrue("Couldn't find profiler output", profileOutFile.exists());
+    assertTrue("Couldn't remove profiler output", profileOutFile.delete());
+  }
+
   protected CLI createJobClient() throws IOException {
     return new CLI();
   }
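
As a usage note, once a job like job2 above completes, the client is expected to have downloaded each profiled attempt's hprof output into the local working directory as "<task attempt id>.profile". A hedged fragment showing how a driver might locate that file follows; it assumes a Job reference ("job") for a completed, hprof-profiled job and attempt number 0, mirroring the test.

    // Assumes: job completed with hprof profiling and file=%s output, and
    // the first attempt (number 0) of the first map task was profiled.
    TaskReport[] maps = job.getTaskReports(TaskType.MAP);
    if (maps.length > 0) {
      TaskAttemptID attempt = new TaskAttemptID(maps[0].getTaskId(), 0);
      File profile = new File(attempt.toString() + ".profile");
      if (profile.exists()) {
        System.out.println("Profiler output downloaded to "
            + profile.getAbsolutePath() + " (" + profile.length() + " bytes)");
      }
    }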


