hadoop-mapreduce-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1415815 [2/3] - in /hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project: ./ conf/ dev-support/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/h...
Date: Fri, 30 Nov 2012 19:58:54 GMT
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java Fri Nov 30 19:58:09 2012
@@ -25,12 +25,15 @@ import java.util.concurrent.CountDownLat
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
@@ -39,12 +42,18 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
+import org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.yarn.event.AsyncDispatcher;
+import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.event.Event;
 import org.junit.Test;
 
 /**
  * Tests the state machine with respect to Job/Task/TaskAttempt kill scenarios.
  *
  */
+@SuppressWarnings({"unchecked", "rawtypes"})
 public class TestKill {
 
   @Test
@@ -132,6 +141,80 @@ public class TestKill {
   }
 
   @Test
+  public void testKillTaskWait() throws Exception {
+    final Dispatcher dispatcher = new AsyncDispatcher() {
+      private TaskAttemptEvent cachedKillEvent;
+      @Override
+      protected void dispatch(Event event) {
+        if (event instanceof TaskAttemptEvent) {
+          TaskAttemptEvent killEvent = (TaskAttemptEvent) event;
+          if (killEvent.getType() == TaskAttemptEventType.TA_KILL) {
+            TaskAttemptId taID = killEvent.getTaskAttemptID();
+            if (taID.getTaskId().getTaskType() == TaskType.REDUCE
+                && taID.getTaskId().getId() == 0 && taID.getId() == 0) {
+              // Task is asking the reduce TA to kill itself. 'Create' a race
+              // condition. Make the task succeed and then inform the task that
+              // TA has succeeded. Once Task gets the TA succeeded event at
+              // KILL_WAIT, then relay the actual kill signal to TA
+              super.dispatch(new TaskAttemptEvent(taID,
+                TaskAttemptEventType.TA_DONE));
+              super.dispatch(new TaskAttemptEvent(taID,
+                TaskAttemptEventType.TA_CONTAINER_CLEANED));
+              super.dispatch(new TaskTAttemptEvent(taID,
+                TaskEventType.T_ATTEMPT_SUCCEEDED));
+              this.cachedKillEvent = killEvent;
+              return;
+            }
+          }
+        } else if (event instanceof TaskEvent) {
+          TaskEvent taskEvent = (TaskEvent) event;
+          if (taskEvent.getType() == TaskEventType.T_ATTEMPT_SUCCEEDED
+              && this.cachedKillEvent != null) {
+            // When the TA comes and reports that it is done, send the
+            // cachedKillEvent
+            super.dispatch(this.cachedKillEvent);
+            return;
+          }
+
+        }
+        super.dispatch(event);
+      }
+    };
+    MRApp app = new MRApp(1, 1, false, this.getClass().getName(), true) {
+      @Override
+      public Dispatcher createDispatcher() {
+        return dispatcher;
+      }
+    };
+    Job job = app.submit(new Configuration());
+    JobId jobId = app.getJobId();
+    app.waitForState(job, JobState.RUNNING);
+    Assert.assertEquals("Num tasks not correct", 2, job.getTasks().size());
+    Iterator<Task> it = job.getTasks().values().iterator();
+    Task mapTask = it.next();
+    Task reduceTask = it.next();
+    app.waitForState(mapTask, TaskState.RUNNING);
+    app.waitForState(reduceTask, TaskState.RUNNING);
+    TaskAttempt mapAttempt = mapTask.getAttempts().values().iterator().next();
+    app.waitForState(mapAttempt, TaskAttemptState.RUNNING);
+    TaskAttempt reduceAttempt = reduceTask.getAttempts().values().iterator().next();
+    app.waitForState(reduceAttempt, TaskAttemptState.RUNNING);
+
+    // Finish map
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            mapAttempt.getID(),
+            TaskAttemptEventType.TA_DONE));
+    app.waitForState(mapTask, TaskState.SUCCEEDED);
+
+    // Now kill the job
+    app.getContext().getEventHandler()
+      .handle(new JobEvent(jobId, JobEventType.JOB_KILL));
+
+    app.waitForInternalState((JobImpl)job, JobStateInternal.KILLED);
+  }
+
+  @Test
   public void testKillTaskAttempt() throws Exception {
     final CountDownLatch latch = new CountDownLatch(1);
     MRApp app = new BlockingMRApp(2, 0, latch);

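The overridden dispatch() above manufactures the KILL_WAIT race on purpose: it swallows the first TA_KILL aimed at the reduce attempt, drives that attempt to success instead, and replays the cached kill only after the task has absorbed T_ATTEMPT_SUCCEEDED. Reduced to its skeleton (the two predicates stand in for the type and ID checks in the test and are illustrative, not part of the patch):

    Dispatcher dispatcher = new AsyncDispatcher() {
      private Event cachedKill;   // the held-back kill event

      @Override
      protected void dispatch(Event event) {
        if (cachedKill == null && isTargetKill(event)) {      // hypothetical check
          cachedKill = event;     // 1. swallow the kill; the real test dispatches
          return;                 //    TA_DONE / TA_CONTAINER_CLEANED /
        }                         //    T_ATTEMPT_SUCCEEDED here instead
        if (cachedKill != null && isSuccessReport(event)) {   // hypothetical check
          super.dispatch(event);        // 2. let the success land first
          super.dispatch(cachedKill);   // 3. then deliver the stale kill
          cachedKill = null;
          return;
        }
        super.dispatch(event);
      }
    };
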
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java Fri Nov 30 19:58:09 2012
@@ -86,7 +86,6 @@ public class TestMRClientService {
     taskAttemptStatus.stateString = "RUNNING";
     taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
     taskAttemptStatus.phase = Phase.MAP;
-    taskAttemptStatus.outputSize = 3;
     // send the status update
     app.getContext().getEventHandler().handle(
         new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Fri Nov 30 19:58:09 2012
@@ -27,6 +27,7 @@ import static org.mockito.Mockito.verify
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -42,6 +43,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -51,10 +53,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl.InitTransition;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl.JobNoTasksCompletedTransition;
 import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.SystemClock;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.server.resourcemanager.resourcetracker.InlineDispatcher;
+import org.apache.hadoop.yarn.state.StateMachine;
+import org.apache.hadoop.yarn.state.StateMachineFactory;
 import org.apache.hadoop.yarn.util.Records;
 import org.junit.Assert;
 import org.junit.Test;
@@ -340,7 +346,7 @@ public class TestJobImpl {
     return isUber;
   }
 
-  private InitTransition getInitTransition() {
+  private static InitTransition getInitTransition() {
     InitTransition initTransition = new InitTransition() {
       @Override
       protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
@@ -350,4 +356,63 @@ public class TestJobImpl {
     };
     return initTransition;
   }
+
+  @Test
+  public void testTransitionsAtFailed() throws IOException {
+    Configuration conf = new Configuration();
+    JobID jobID = JobID.forName("job_1234567890000_0001");
+    JobId jobId = TypeConverter.toYarn(jobID);
+    OutputCommitter committer = mock(OutputCommitter.class);
+    doThrow(new IOException("forcefail"))
+      .when(committer).setupJob(any(JobContext.class));
+    InlineDispatcher dispatcher = new InlineDispatcher();
+    JobImpl job = new StubbedJob(jobId, Records
+        .newRecord(ApplicationAttemptId.class), conf,
+        dispatcher.getEventHandler(), committer, true, null);
+
+    dispatcher.register(JobEventType.class, job);
+    job.handle(new JobEvent(jobId, JobEventType.JOB_INIT));
+    Assert.assertEquals(JobState.FAILED, job.getState());
+
+    job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_COMPLETED));
+    Assert.assertEquals(JobState.FAILED, job.getState());
+    job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_ATTEMPT_COMPLETED));
+    Assert.assertEquals(JobState.FAILED, job.getState());
+    job.handle(new JobEvent(jobId, JobEventType.JOB_MAP_TASK_RESCHEDULED));
+    Assert.assertEquals(JobState.FAILED, job.getState());
+    job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE));
+    Assert.assertEquals(JobState.FAILED, job.getState());
+  }
+
+  private static class StubbedJob extends JobImpl {
+    //override the init transition
+    private final InitTransition initTransition = getInitTransition();
+    StateMachineFactory<JobImpl, JobStateInternal, JobEventType, JobEvent> localFactory
+        = stateMachineFactory.addTransition(JobStateInternal.NEW,
+            EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
+            JobEventType.JOB_INIT,
+            // This is abusive.
+            initTransition);
+
+    private final StateMachine<JobStateInternal, JobEventType, JobEvent>
+        localStateMachine;
+
+    @Override
+    protected StateMachine<JobStateInternal, JobEventType, JobEvent> getStateMachine() {
+      return localStateMachine;
+    }
+
+    public StubbedJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
+        Configuration conf, EventHandler eventHandler,
+        OutputCommitter committer, boolean newApiCommitter, String user) {
+      super(jobId, applicationAttemptId, conf, eventHandler,
+          null, new JobTokenSecretManager(), new Credentials(),
+          new SystemClock(), null, MRAppMetrics.create(), committer,
+          newApiCommitter, user, System.currentTimeMillis(), null, null);
+
+      // This "this leak" is okay because the retained pointer is in an
+      //  instance variable.
+      localStateMachine = localFactory.make(this);
+    }
+  }
 }

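testTransitionsAtFailed drives the job to FAILED by making the mocked OutputCommitter throw from setupJob, then checks that FAILED absorbs the later task events without a state change. The stubbing works because StateMachineFactory.addTransition returns an extended copy rather than mutating the factory it is called on, so StubbedJob can swap in its own JOB_INIT transition while JobImpl's shared stateMachineFactory stays untouched. The essential shape, condensed from the code above:

    // addTransition() is non-destructive: it yields a new factory.
    StateMachineFactory<JobImpl, JobStateInternal, JobEventType, JobEvent> localFactory =
        stateMachineFactory.addTransition(JobStateInternal.NEW,
            EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
            JobEventType.JOB_INIT, getInitTransition());

    // Each StubbedJob instance then builds its private machine from the copy.
    StateMachine<JobStateInternal, JobEventType, JobEvent> machine =
        localFactory.make(this);
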
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java Fri Nov 30 19:58:09 2012
@@ -141,7 +141,6 @@ public class TestTaskImpl {
 
     private float progress = 0;
     private TaskAttemptState state = TaskAttemptState.NEW;
-    private TaskAttemptId attemptId;
     private TaskType taskType;
 
     public MockTaskAttemptImpl(TaskId taskId, int id, EventHandler eventHandler,
@@ -152,14 +151,11 @@ public class TestTaskImpl {
         AppContext appContext, TaskType taskType) {
       super(taskId, id, eventHandler, taskAttemptListener, jobFile, partition, conf,
           dataLocations, committer, jobToken, credentials, clock, appContext);
-      attemptId = Records.newRecord(TaskAttemptId.class);
-      attemptId.setId(id);
-      attemptId.setTaskId(taskId);
       this.taskType = taskType;
     }
 
     public TaskAttemptId getAttemptId() {
-      return attemptId;
+      return getID();
     }
     
     @Override
@@ -561,4 +557,49 @@ public class TestTaskImpl {
     mockTask = createMockTask(TaskType.REDUCE);
     runSpeculativeTaskAttemptSucceeds(TaskEventType.T_ATTEMPT_COMMIT_PENDING);
   }
+
+  @Test
+  public void testSpeculativeMapFetchFailure() {
+    // Setup a scenario where speculative task wins, first attempt killed
+    mockTask = createMockTask(TaskType.MAP);
+    runSpeculativeTaskAttemptSucceeds(TaskEventType.T_ATTEMPT_KILLED);
+    assertEquals(2, taskAttempts.size());
+
+    // speculative attempt retroactively fails from fetch failures
+    mockTask.handle(new TaskTAttemptEvent(taskAttempts.get(1).getAttemptId(),
+        TaskEventType.T_ATTEMPT_FAILED));
+
+    assertTaskScheduledState();
+    assertEquals(3, taskAttempts.size());
+  }
+
+  @Test
+  public void testSpeculativeMapMultipleSucceedFetchFailure() {
+    // Setup a scenario where speculative task wins, first attempt succeeds
+    mockTask = createMockTask(TaskType.MAP);
+    runSpeculativeTaskAttemptSucceeds(TaskEventType.T_ATTEMPT_SUCCEEDED);
+    assertEquals(2, taskAttempts.size());
+
+    // speculative attempt retroactively fails from fetch failures
+    mockTask.handle(new TaskTAttemptEvent(taskAttempts.get(1).getAttemptId(),
+        TaskEventType.T_ATTEMPT_FAILED));
+
+    assertTaskScheduledState();
+    assertEquals(3, taskAttempts.size());
+  }
+
+  @Test
+  public void testSpeculativeMapFailedFetchFailure() {
+    // Setup a scenario where speculative task wins, first attempt succeeds
+    mockTask = createMockTask(TaskType.MAP);
+    runSpeculativeTaskAttemptSucceeds(TaskEventType.T_ATTEMPT_FAILED);
+    assertEquals(2, taskAttempts.size());
+
+    // speculative attempt retroactively fails from fetch failures
+    mockTask.handle(new TaskTAttemptEvent(taskAttempts.get(1).getAttemptId(),
+        TaskEventType.T_ATTEMPT_FAILED));
+
+    assertTaskScheduledState();
+    assertEquals(3, taskAttempts.size());
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Fri Nov 30 19:58:09 2012
@@ -18,13 +18,8 @@
 
 package org.apache.hadoop.mapreduce.v2.util;
 
-import java.io.BufferedReader;
-import java.io.File;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.net.URI;
-import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -59,6 +54,8 @@ import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 
+import com.google.common.base.Charsets;
+
 /**
  * Helper class for MR applications
  */
@@ -132,61 +129,24 @@ public class MRApps extends Apps {
 
   private static void setMRFrameworkClasspath(
       Map<String, String> environment, Configuration conf) throws IOException {
-    InputStream classpathFileStream = null;
-    BufferedReader reader = null;
-    try {
-      // Get yarn mapreduce-app classpath from generated classpath
-      // Works if compile time env is same as runtime. Mainly tests.
-      ClassLoader thisClassLoader =
-          Thread.currentThread().getContextClassLoader();
-      String mrAppGeneratedClasspathFile = "mrapp-generated-classpath";
-      classpathFileStream =
-          thisClassLoader.getResourceAsStream(mrAppGeneratedClasspathFile);
-
-      // Put the file itself on classpath for tasks.
-      URL classpathResource = thisClassLoader
-        .getResource(mrAppGeneratedClasspathFile);
-      if (classpathResource != null) {
-        String classpathElement = classpathResource.getFile();
-        if (classpathElement.contains("!")) {
-          classpathElement = classpathElement.substring(0,
-            classpathElement.indexOf("!"));
-        } else {
-          classpathElement = new File(classpathElement).getParent();
-        }
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
-          classpathElement);
-      }
-
-      if (classpathFileStream != null) {
-        reader = new BufferedReader(new InputStreamReader(classpathFileStream));
-        String cp = reader.readLine();
-        if (cp != null) {
-          Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
-            cp.trim());
-        }
-      }
-
-      // Add standard Hadoop classes
-      for (String c : conf.getStrings(
-          YarnConfiguration.YARN_APPLICATION_CLASSPATH,
-          YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
-            .trim());
-      }
-      for (String c : conf.getStrings(
-          MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
-          MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH)) {
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
-            .trim());
-      }
-    } finally {
-      if (classpathFileStream != null) {
-        classpathFileStream.close();
-      }
-      if (reader != null) {
-        reader.close();
-      }
+    // Propagate the system classpath when using the mini cluster
+    if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
+          System.getProperty("java.class.path"));
+    }
+
+    // Add standard Hadoop classes
+    for (String c : conf.getStrings(
+        YarnConfiguration.YARN_APPLICATION_CLASSPATH,
+        YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
+      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
+          .trim());
+    }
+    for (String c : conf.getStrings(
+        MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
+        MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH)) {
+      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
+          .trim());
     }
     // TODO: Remove duplicates.
   }

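The rewrite drops the compile-time mrapp-generated-classpath lookup entirely: instead of scraping a generated file off the test classpath, a mini-cluster caller now opts in explicitly and the method forwards the submitting JVM's own classpath. A caller-side fragment (the flag is from the diff; the surrounding test setup is assumed):

    Configuration conf = new Configuration();
    // Tells setMRFrameworkClasspath() to append System.getProperty("java.class.path")
    // to the container CLASSPATH; only meaningful for mini-cluster tests.
    conf.setBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, true);
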
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml Fri Nov 30 19:58:09 2012
@@ -51,7 +51,7 @@
         <artifactId>maven-surefire-plugin</artifactId>
 	<configuration>
           <systemPropertyVariables>
-            <log4j.configuration>file:///${project.parent.basedir}/../src/test/log4j.properties</log4j.configuration>
+            <log4j.configuration>file:///${project.basedir}/src/test/resources/log4j.properties</log4j.configuration>
           </systemPropertyVariables>
         </configuration>
       </plugin>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java Fri Nov 30 19:58:09 2012
@@ -420,6 +420,8 @@ public abstract class FileInputFormat<K,
           }
           break;
         }
+        default:
+          continue; // nothing special to do for this character
       }
     }
     pathStrings.add(commaSeparatedPaths.substring(pathStart, length));

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java Fri Nov 30 19:58:09 2012
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
 import java.util.List;
@@ -30,6 +31,8 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 /**
  * <code>JobQueueClient</code> is interface provided to the user in order to get
  * JobQueue related information from the {@link JobTracker}
@@ -144,7 +147,8 @@ class JobQueueClient extends Configured 
   private void displayQueueList() throws IOException {
     JobQueueInfo[] rootQueues = jc.getRootQueues();
     for (JobQueueInfo queue : rootQueues) {
-      printJobQueueInfo(queue, new PrintWriter(System.out));
+      printJobQueueInfo(queue, new PrintWriter(new OutputStreamWriter(
+          System.out, Charsets.UTF_8)));
     }
   }
   
@@ -182,7 +186,8 @@ class JobQueueClient extends Configured 
       System.out.println("Queue \"" + queue + "\" does not exist.");
       return;
     }
-    printJobQueueInfo(jobQueueInfo, new PrintWriter(System.out));
+    printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(
+        System.out, Charsets.UTF_8)));
     if (showJobs && (jobQueueInfo.getChildren() == null ||
         jobQueueInfo.getChildren().size() == 0)) {
       JobStatus[] jobs = jobQueueInfo.getJobStatuses();
@@ -223,10 +228,10 @@ class JobQueueClient extends Configured 
     if ("-queueinfo".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + "<job-queue-name> [-showJobs]]");
     } else {
-      System.err.printf(prefix + "<command> <args>\n");
-      System.err.printf("\t[-list]\n");
-      System.err.printf("\t[-info <job-queue-name> [-showJobs]]\n");
-      System.err.printf("\t[-showacls] \n\n");
+      System.err.printf(prefix + "<command> <args>%n");
+      System.err.printf("\t[-list]%n");
+      System.err.printf("\t[-info <job-queue-name> [-showJobs]]%n");
+      System.err.printf("\t[-showacls] %n%n");
       ToolRunner.printGenericCommandUsage(System.out);
     }
   }

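Two themes of this patch recur here and in CLI.java and HistoryViewer.java below: %n is java.util.Formatter's portable newline, which expands to the platform line separator (relevant on the Windows branch this commit targets, where "\n" alone would emit a bare linefeed), and wrapping System.out in an OutputStreamWriter pins the output encoding instead of inheriting the JVM default. In isolation:

    // "%n" expands to the platform separator: "\r\n" on Windows, "\n" on Unix.
    System.err.printf("\t[-list]%n");

    // Encode explicitly instead of relying on the file.encoding default.
    PrintWriter writer = new PrintWriter(
        new OutputStreamWriter(System.out, Charsets.UTF_8));
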
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java Fri Nov 30 19:58:09 2012
@@ -25,8 +25,7 @@ import java.io.IOException;
 
 class MapTaskStatus extends TaskStatus {
 
-  private long mapFinishTime;
-  private long sortFinishTime;
+  private long mapFinishTime = 0;
   
   public MapTaskStatus() {}
 
@@ -49,10 +48,10 @@ class MapTaskStatus extends TaskStatus {
   @Override
   void setFinishTime(long finishTime) {
     super.setFinishTime(finishTime);
-    if (mapFinishTime == 0) {
-      mapFinishTime = finishTime;
+    // set mapFinishTime if it hasn't been set before
+    if (getMapFinishTime() == 0) {
+      setMapFinishTime(finishTime);
     }
-    setSortFinishTime(finishTime);
   }
   
   @Override
@@ -74,16 +73,6 @@ class MapTaskStatus extends TaskStatus {
   void setMapFinishTime(long mapFinishTime) {
     this.mapFinishTime = mapFinishTime;
   }
-
-  @Override
-  public long getSortFinishTime() {
-    return sortFinishTime;
-  }
-
-  @Override
-  void setSortFinishTime(long sortFinishTime) {
-    this.sortFinishTime = sortFinishTime;
-  }
   
   @Override
   synchronized void statusUpdate(TaskStatus status) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java Fri Nov 30 19:58:09 2012
@@ -49,6 +49,8 @@ import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
+import com.google.common.base.Charsets;
+
 /**
  * A simple logger to handle the task-specific user logs.
  * This class uses the system property <code>hadoop.log.dir</code>.
@@ -104,7 +106,8 @@ public class TaskLog {
   throws IOException {
     File indexFile = getIndexFile(taskid, isCleanup);
     BufferedReader fis = new BufferedReader(new InputStreamReader(
-      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null)));
+      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
+      Charsets.UTF_8));
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
     //stdout:<start-offset in the stdout file> <length>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java Fri Nov 30 19:58:09 2012
@@ -27,6 +27,8 @@ import org.apache.hadoop.io.LongWritable
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.*;
 
+import com.google.common.base.Charsets;
+
 /** 
  * An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
@@ -59,7 +61,9 @@ public class TextInputFormat extends Fil
     reporter.setStatus(genericSplit.toString());
     String delimiter = job.get("textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
-    if (null != delimiter) recordDelimiterBytes = delimiter.getBytes();
+    if (null != delimiter) {
+      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+    }
     return new LineRecordReader(job, (FileSplit) genericSplit,
         recordDelimiterBytes);
   }

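The no-argument String.getBytes() encodes with the JVM default charset, so a non-ASCII record delimiter could produce different bytes on a Windows client than in the task JVM; passing Charsets.UTF_8 makes the delimiter bytes deterministic. For example (the delimiter value is hypothetical):

    String delimiter = "§";
    byte[] platformBytes = delimiter.getBytes();            // varies with file.encoding
    byte[] utf8Bytes = delimiter.getBytes(Charsets.UTF_8);  // always {0xC2, 0xA7}
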
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java Fri Nov 30 19:58:09 2012
@@ -49,9 +49,7 @@ public class CombineFileRecordReader<K, 
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;
   
   protected int idx;
   protected long progress;
@@ -106,7 +104,6 @@ public class CombineFileRecordReader<K, 
     throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Fri Nov 30 19:58:09 2012
@@ -56,6 +56,8 @@ import org.codehaus.jackson.JsonParseExc
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 
+import com.google.common.base.Charsets;
+
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 class JobSubmitter {
@@ -550,7 +552,7 @@ class JobSubmitter {
 
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
-              .getBytes());
+              .getBytes(Charsets.UTF_8));
         }
       } catch (JsonMappingException e) {
         json_error = true;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java Fri Nov 30 19:58:09 2012
@@ -188,7 +188,7 @@ public class HistoryViewer {
              decimal.format(counter.getValue());
 
            buff.append(
-               String.format("\n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", 
+               String.format("%n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", 
                    totalGroup.getDisplayName(),
                    counter.getDisplayName(),
                    mapValue, reduceValue, totalValue));

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java Fri Nov 30 19:58:09 2012
@@ -30,6 +30,8 @@ import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -58,6 +60,8 @@ import org.apache.hadoop.conf.Configurat
 public class DBInputFormat<T extends DBWritable>
     extends InputFormat<LongWritable, T> implements Configurable {
 
+  private static final Log LOG = LogFactory.getLog(DBInputFormat.class);
+  
   private String dbProductName = "DEFAULT";
 
   /**
@@ -354,6 +358,8 @@ public class DBInputFormat<T extends DBW
         this.connection.close();
         this.connection = null;
       }
-    } catch (SQLException sqlE) { } // ignore exception on close.
+    } catch (SQLException sqlE) {
+      LOG.debug("Exception on close", sqlE);
+    }
   }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java Fri Nov 30 19:58:09 2012
@@ -219,7 +219,6 @@ public abstract class CombineFileInputFo
       Path p = fs.makeQualified(paths[i]);
       newpaths.add(p);
     }
-    paths = null;
 
     // In one single iteration, process all the paths in a single pool.
     // Processing one pool at a time ensures that a split contains paths

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java Fri Nov 30 19:58:09 2012
@@ -46,9 +46,7 @@ public class CombineFileRecordReader<K, 
                                           Integer.class};
 
   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;
   
   protected int idx;
@@ -111,7 +109,6 @@ public class CombineFileRecordReader<K, 
     throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java Fri Nov 30 19:58:09 2012
@@ -425,6 +425,8 @@ public abstract class FileInputFormat<K,
           }
           break;
         }
+        default:
+          continue; // nothing special to do for this character
       }
     }
     pathStrings.add(commaSeparatedPaths.substring(pathStart, length));

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java Fri Nov 30 19:58:09 2012
@@ -107,25 +107,14 @@ public class NLineInputFormat extends Fi
         numLines++;
         length += num;
         if (numLines == numLinesPerSplit) {
-          // NLineInputFormat uses LineRecordReader, which always reads
-          // (and consumes) at least one character out of its upper split
-          // boundary. So to make sure that each mapper gets N lines, we
-          // move back the upper split limits of each split 
-          // by one character here.
-          if (begin == 0) {
-            splits.add(new FileSplit(fileName, begin, length - 1,
-              new String[] {}));
-          } else {
-            splits.add(new FileSplit(fileName, begin - 1, length,
-              new String[] {}));
-          }
+          splits.add(createFileSplit(fileName, begin, length));
           begin += length;
           length = 0;
           numLines = 0;
         }
       }
       if (numLines != 0) {
-        splits.add(new FileSplit(fileName, begin, length, new String[]{}));
+        splits.add(createFileSplit(fileName, begin, length));
       }
     } finally {
       if (lr != null) {
@@ -134,6 +123,23 @@ public class NLineInputFormat extends Fi
     }
     return splits; 
   }
+
+  /**
+   * NLineInputFormat uses LineRecordReader, which always reads
+   * (and consumes) at least one character out of its upper split
+   * boundary. So to make sure that each mapper gets N lines, we
+   * move back the upper split limits of each split 
+   * by one character here.
+   * @param fileName  Path of file
+   * @param begin  the position of the first byte in the file to process
+   * @param length  number of bytes in InputSplit
+   * @return  FileSplit
+   */
+  protected static FileSplit createFileSplit(Path fileName, long begin, long length) {
+    return (begin == 0) 
+    ? new FileSplit(fileName, begin, length - 1, new String[] {})
+    : new FileSplit(fileName, begin - 1, length, new String[] {});
+  }
   
   /**
    * Set the number of lines per split

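Concretely, suppose each N-line chunk spans 100 bytes. The extracted helper reproduces the boundary shift the inlined code used to do: the first split keeps its start but surrenders its final byte, and every later split starts one byte early with its length intact, so LineRecordReader's consume-past-the-boundary behavior still hands each mapper exactly N lines. Illustrative calls (the helper is protected, and 'file' here is a hypothetical Path):

    FileSplit first  = createFileSplit(file, 0, 100);    // -> start 0,  length 99
    FileSplit second = createFileSplit(file, 100, 100);  // -> start 99, length 100
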
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java Fri Nov 30 19:58:09 2012
@@ -32,6 +32,8 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
+import com.google.common.base.Charsets;
+
 /** An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
 * the position in the file, and values are the line of text. */
@@ -47,7 +49,7 @@ public class TextInputFormat extends Fil
         "textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
     if (null != delimiter)
-      recordDelimiterBytes = delimiter.getBytes();
+      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
     return new LineRecordReader(recordDelimiterBytes);
   }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java Fri Nov 30 19:58:09 2012
@@ -18,21 +18,23 @@
 package org.apache.hadoop.mapreduce.security;
 
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.net.URL;
 
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.record.Utils;
 
+import com.google.common.base.Charsets;
+
 /**
  * 
 * utilities for generating keys, hashes and verifying them for shuffle
@@ -41,6 +43,8 @@ import org.apache.hadoop.record.Utils;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class SecureShuffleUtils {
+  private static final Log LOG = LogFactory.getLog(SecureShuffleUtils.class);
+  
   public static final String HTTP_HEADER_URL_HASH = "UrlHash";
   public static final String HTTP_HEADER_REPLY_URL_HASH = "ReplyHash";
   
@@ -49,7 +53,8 @@ public class SecureShuffleUtils {
    * @param msg
    */
   public static String generateHash(byte[] msg, SecretKey key) {
-    return new String(Base64.encodeBase64(generateByteHash(msg, key)));
+    return new String(Base64.encodeBase64(generateByteHash(msg, key)), 
+        Charsets.UTF_8);
   }
   
   /**
@@ -80,7 +85,7 @@ public class SecureShuffleUtils {
    */
   public static String hashFromString(String enc_str, SecretKey key) 
   throws IOException {
-    return generateHash(enc_str.getBytes(), key); 
+    return generateHash(enc_str.getBytes(Charsets.UTF_8), key); 
   }
   
   /**
@@ -91,9 +96,9 @@ public class SecureShuffleUtils {
    */
   public static void verifyReply(String base64Hash, String msg, SecretKey key)
   throws IOException {
-    byte[] hash = Base64.decodeBase64(base64Hash.getBytes());
+    byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
     
-    boolean res = verifyHash(hash, msg.getBytes(), key);
+    boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
     
     if(res != true) {
       throw new IOException("Verification of the hashReply failed");
@@ -126,19 +131,4 @@ public class SecureShuffleUtils {
   private static String buildMsgFrom(String uri_path, String uri_query, int port) {
     return String.valueOf(port) + uri_path + "?" + uri_query;
   }
-  
-  
-  /**
-   * byte array to Hex String
-   * @param ba
-   * @return string with HEX value of the key
-   */
-  public static String toHex(byte[] ba) {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for(byte b: ba) {
-      ps.printf("%x", b);
-    }
-    return baos.toString();
-  }
 }

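With the charsets pinned, the shuffle handshake hash is byte-for-byte stable across platforms. A round-trip sketch using the methods this patch keeps (the secret-key construction via JobTokenSecretManager.createSecretKey is an assumption, and the URL string is illustrative):

    SecretKey key = JobTokenSecretManager.createSecretKey(      // assumed helper
        "shuffle-secret".getBytes(Charsets.UTF_8));
    String msg = "/mapOutput?job=job_0&map=attempt_0";
    String hash = SecureShuffleUtils.hashFromString(msg, key);  // Base64-encoded HMAC
    SecureShuffleUtils.verifyReply(hash, msg, key);             // throws IOException on mismatch
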
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java Fri Nov 30 19:58:09 2012
@@ -144,7 +144,6 @@ public class Shuffle<K, V> implements Ex
     for (Fetcher<K,V> fetcher : fetchers) {
       fetcher.shutDown();
     }
-    fetchers = null;
     
     // stop the scheduler
     scheduler.close();

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Fri Nov 30 19:58:09 2012
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapreduce.tools;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.List;
@@ -53,6 +54,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.logaggregation.LogDumper;
 
+import com.google.common.base.Charsets;
+
 /**
  * Interprets the map reduce cli options 
  */
@@ -426,25 +429,25 @@ public class CLI extends Configured impl
           " <job-id> <task-attempt-id>]. " +
           " <task-attempt-id> is optional to get task attempt logs.");      
     } else {
-      System.err.printf(prefix + "<command> <args>\n");
-      System.err.printf("\t[-submit <job-file>]\n");
-      System.err.printf("\t[-status <job-id>]\n");
-      System.err.printf("\t[-counter <job-id> <group-name> <counter-name>]\n");
-      System.err.printf("\t[-kill <job-id>]\n");
+      System.err.printf(prefix + "<command> <args>%n");
+      System.err.printf("\t[-submit <job-file>]%n");
+      System.err.printf("\t[-status <job-id>]%n");
+      System.err.printf("\t[-counter <job-id> <group-name> <counter-name>]%n");
+      System.err.printf("\t[-kill <job-id>]%n");
       System.err.printf("\t[-set-priority <job-id> <priority>]. " +
-        "Valid values for priorities are: " + jobPriorityValues + "\n");
-      System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]\n");
-      System.err.printf("\t[-history <jobHistoryFile>]\n");
-      System.err.printf("\t[-list [all]]\n");
-      System.err.printf("\t[-list-active-trackers]\n");
-      System.err.printf("\t[-list-blacklisted-trackers]\n");
+        "Valid values for priorities are: " + jobPriorityValues + "%n");
+      System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]%n");
+      System.err.printf("\t[-history <jobHistoryFile>]%n");
+      System.err.printf("\t[-list [all]]%n");
+      System.err.printf("\t[-list-active-trackers]%n");
+      System.err.printf("\t[-list-blacklisted-trackers]%n");
       System.err.println("\t[-list-attempt-ids <job-id> <task-type> " +
         "<task-state>]. " +
         "Valid values for <task-type> are " + taskTypes + ". " +
         "Valid values for <task-state> are " + taskStates);
-      System.err.printf("\t[-kill-task <task-attempt-id>]\n");
-      System.err.printf("\t[-fail-task <task-attempt-id>]\n");
-      System.err.printf("\t[-logs <job-id> <task-attempt-id>]\n\n");
+      System.err.printf("\t[-kill-task <task-attempt-id>]%n");
+      System.err.printf("\t[-fail-task <task-attempt-id>]%n");
+      System.err.printf("\t[-logs <job-id> <task-attempt-id>]%n%n");
       ToolRunner.printGenericCommandUsage(System.out);
     }
   }
@@ -584,7 +587,8 @@ public class CLI extends Configured impl
 
   public void displayJobList(JobStatus[] jobs) 
       throws IOException, InterruptedException {
-    displayJobList(jobs, new PrintWriter(System.out));
+    displayJobList(jobs, new PrintWriter(new OutputStreamWriter(System.out,
+        Charsets.UTF_8)));
   }
 
   @Private
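
The hunk above makes two portability fixes: the printf format strings switch from a hard-coded "\n" to "%n", which java.util.Formatter expands to the platform line separator, and displayJobList writes through an OutputStreamWriter pinned to UTF-8 instead of relying on the JVM default charset. A minimal standalone sketch of both idioms (the class name is illustrative, not from the patch):

    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;

    import com.google.common.base.Charsets;

    public class UsageDemo {
      public static void main(String[] args) {
        // %n expands to the platform separator ("\r\n" on Windows,
        // "\n" on Unix); a literal \n is a bare LF everywhere.
        System.err.printf("\t[-kill <job-id>]%n");

        // An explicit charset keeps output stable regardless of the
        // platform default (e.g. Cp1252 on Windows JVMs).
        PrintWriter out = new PrintWriter(
            new OutputStreamWriter(System.out, Charsets.UTF_8), true);
        out.println("written as UTF-8");
      }
    }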

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java Fri Nov 30 19:58:09 2012
@@ -19,9 +19,10 @@
 package org.apache.hadoop.mapreduce.util;
 
 import java.io.BufferedReader;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -30,6 +31,8 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import com.google.common.base.Charsets;
+
 /**
  * Plugin to calculate resource information on Linux systems.
  */
@@ -152,9 +155,10 @@ public class LinuxResourceCalculatorPlug
 
     // Read "/proc/memInfo" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsMemFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsMemFile),
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....
@@ -211,9 +215,10 @@ public class LinuxResourceCalculatorPlug
     }
     // Read "/proc/cpuinfo" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsCpuFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsCpuFile), 
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....
@@ -258,9 +263,10 @@ public class LinuxResourceCalculatorPlug
   private void readProcStatFile() {
     // Read "/proc/stat" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsStatFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsStatFile),
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....
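
Each of the three /proc readers above replaces FileReader, which always decodes with the JVM default charset, with an InputStreamReader over a FileInputStream that names UTF-8 explicitly. The pattern in isolation, as a sketch (the file path and class name are illustrative):

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import com.google.common.base.Charsets;

    public class ProcReadDemo {
      public static void main(String[] args) throws IOException {
        BufferedReader in = null;
        try {
          // FileReader has no charset parameter; the stream/reader pair does.
          in = new BufferedReader(new InputStreamReader(
              new FileInputStream("/proc/meminfo"), Charsets.UTF_8));
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);
          }
        } catch (FileNotFoundException f) {
          System.err.println("no /proc/meminfo (not a Linux procfs?)");
        } finally {
          if (in != null) {
            in.close();
          }
        }
      }
    }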

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java Fri Nov 30 19:58:09 2012
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFac
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java Fri Nov 30 19:58:09 2012
@@ -20,9 +20,10 @@ package org.apache.hadoop.mapreduce.util
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.List;
@@ -39,6 +40,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Charsets;
+
 /**
  * A Proc file-system based ProcessTree. Works only on Linux.
  */
@@ -350,7 +353,7 @@ public class ProcfsBasedProcessTree exte
   }
 
   private static final String PROCESSTREE_DUMP_FORMAT =
-      "\t|- %s %s %d %d %s %d %d %d %d %s\n";
+      "\t|- %s %s %d %d %s %d %d %d %d %s%n";
 
   /**
    * Get a dump of the process-tree.
@@ -363,7 +366,7 @@ public class ProcfsBasedProcessTree exte
     // The header.
     ret.append(String.format("\t|- PID PPID PGRPID SESSID CMD_NAME "
         + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
-        + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"));
+        + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE%n"));
     for (ProcessInfo p : processTree.values()) {
       if (p != null) {
         ret.append(String.format(PROCESSTREE_DUMP_FORMAT, p.getPid(), p
@@ -505,10 +508,11 @@ public class ProcfsBasedProcessTree exte
     ProcessInfo ret = null;
     // Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
       File pidDir = new File(procfsDir, pinfo.getPid());
-      fReader = new FileReader(new File(pidDir, PROCFS_STAT_FILE));
+      fReader = new InputStreamReader(new FileInputStream(
+          new File(pidDir, PROCFS_STAT_FILE)), Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // The process vanished in the interim!
@@ -695,11 +699,11 @@ public class ProcfsBasedProcessTree exte
         return ret;
       }
       BufferedReader in = null;
-      FileReader fReader = null;
+      InputStreamReader fReader = null;
       try {
-        fReader =
-            new FileReader(new File(new File(procfsDir, pid),
-                PROCFS_CMDLINE_FILE));
+        fReader = new InputStreamReader(new FileInputStream(
+            new File(new File(procfsDir, pid), PROCFS_CMDLINE_FILE)),
+            Charsets.UTF_8);
       } catch (FileNotFoundException f) {
         // The process vanished in the interim!
         return ret;
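
The same charset-explicit reader pattern is applied here, with the added wrinkle that the target process can exit between the directory listing and the open, so FileNotFoundException is expected and non-fatal. A sketch of that race-tolerant read, assuming a layout like /proc/<pid>/stat (the helper name is hypothetical):

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import com.google.common.base.Charsets;

    public class PidStatDemo {
      // Returns the first line of <procfsDir>/<pid>/stat, or null if the
      // process exited in the interim -- the case the patch tolerates.
      static String readStatLine(String procfsDir, String pid)
          throws IOException {
        BufferedReader in = null;
        try {
          in = new BufferedReader(new InputStreamReader(
              new FileInputStream(new File(new File(procfsDir, pid), "stat")),
              Charsets.UTF_8));
          return in.readLine();
        } catch (FileNotFoundException f) {
          return null; // process vanished; caller treats this as "gone"
        } finally {
          if (in != null) {
            in.close();
          }
        }
      }

      public static void main(String[] args) throws IOException {
        System.out.println(readStatLine("/proc", "1"));
      }
    }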

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Fri Nov 30 19:58:09 2012
@@ -610,36 +610,6 @@
     </description>
   </property>
   
-<!-- Job Notification Configuration -->
-
-<!--
-<property>
- <name>mapreduce.job.end-notification.url</name>
- <value>http://localhost:8080/jobstatus.php?jobId=$jobId&amp;jobStatus=$jobStatus</value>
- <description>Indicates url which will be called on completion of job to inform
-              end status of job.
-              User can give at most 2 variables with URI : $jobId and $jobStatus.
-              If they are present in URI, then they will be replaced by their
-              respective values.
-</description>
-</property>
--->
-
-<property>
-  <name>mapreduce.job.end-notification.retry.attempts</name>
-  <value>0</value>
-  <description>Indicates how many times hadoop should attempt to contact the
-               notification URL </description>
-</property>
-
-<property>
-  <name>mapreduce.job.end-notification.retry.interval</name>
-   <value>30000</value>
-   <description>Indicates time in milliseconds between notification URL retry
-                calls</description>
-</property>
-  
-
 <property>
   <name>mapreduce.job.queuename</name>
   <value>default</value>
@@ -802,49 +772,53 @@
   </description>
 </property>
 
+<!-- Job Notification Configuration -->
 <property>
-  <name>mapreduce.job.end-notification.max.attempts</name>
-  <value>5</value>
-  <final>true</final>
-  <description>The maximum number of times a URL will be read for providing job
-    end notification. Cluster administrators can set this to limit how long
-    after end of a job, the Application Master waits before exiting. Must be
-    marked as final to prevent users from overriding this.
-  </description>
+ <name>mapreduce.job.end-notification.url</name>
+ <!--<value>http://localhost:8080/jobstatus.php?jobId=$jobId&amp;jobStatus=$jobStatus</value>-->
+ <description>Indicates url which will be called on completion of job to inform
+              end status of job.
+              User can give at most 2 variables with URI : $jobId and $jobStatus.
+              If they are present in URI, then they will be replaced by their
+              respective values.
+</description>
 </property>
 
 <property>
-  <name>mapreduce.job.end-notification.max.retry.interval</name>
-  <value>5</value>
-  <final>true</final>
-  <description>The maximum amount of time (in seconds) to wait before retrying
-    job end notification. Cluster administrators can set this to limit how long
-    the Application Master waits before exiting. Must be marked as final to
-    prevent users from overriding this.</description>
+  <name>mapreduce.job.end-notification.retry.attempts</name>
+  <value>0</value>
+  <description>The number of times the submitter of the job wants to retry job
+    end notification if it fails. This is capped by
+    mapreduce.job.end-notification.max.attempts</description>
 </property>
 
 <property>
-  <name>mapreduce.job.end-notification.url</name>
-  <value></value>
-  <description>The URL to send job end notification. It may contain sentinels
-    $jobId and $jobStatus which will be replaced with jobId and jobStatus.
-  </description>
+  <name>mapreduce.job.end-notification.retry.interval</name>
+  <value>1000</value>
+  <description>The number of milliseconds the submitter of the job wants to
+    wait before job end notification is retried if it fails. This is capped by
+    mapreduce.job.end-notification.max.retry.interval</description>
 </property>
 
 <property>
-  <name>mapreduce.job.end-notification.retry.attempts</name>
+  <name>mapreduce.job.end-notification.max.attempts</name>
   <value>5</value>
-  <description>The number of times the submitter of the job wants to retry job
-    end notification if it fails. This is capped by
-    mapreduce.job.end-notification.max.attempts</description>
+  <final>true</final>
+  <description>The maximum number of times a URL will be read for providing job
+    end notification. Cluster administrators can set this to limit how long
+    after end of a job, the Application Master waits before exiting. Must be
+    marked as final to prevent users from overriding this.
+  </description>
 </property>
 
 <property>
-  <name>mapreduce.job.end-notification.retry.interval</name>
-  <value>1</value>
-  <description>The number of seconds the submitter of the job wants to wait
-    before job end notification is retried if it fails. This is capped by
-    mapreduce.job.end-notification.max.retry.interval</description>
+  <name>mapreduce.job.end-notification.max.retry.interval</name>
+  <value>5000</value>
+  <final>true</final>
+  <description>The maximum amount of time (in milliseconds) to wait before
+     retrying job end notification. Cluster administrators can set this to
+     limit how long the Application Master waits before exiting. Must be marked
+     as final to prevent users from overriding this.</description>
 </property>
 
 <property>

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1407223-1415786
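
The reshuffled block above restores the job-end notification properties from trunk: the per-job retry.attempts and retry.interval (now in milliseconds) are user-settable but capped by the final max.attempts and max.retry.interval values, which only cluster administrators control. A hedged sketch of how a job submitter might set the per-job side (the URL host is a placeholder):

    import org.apache.hadoop.conf.Configuration;

    public class NotificationConfDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // $jobId and $jobStatus are replaced by the framework before the
        // notification URL is fetched.
        conf.set("mapreduce.job.end-notification.url",
            "http://example.com/jobstatus?jobId=$jobId&jobStatus=$jobStatus");
        // Retries on failure: both values are capped by the cluster-side
        // (final) max.attempts and max.retry.interval settings.
        conf.setInt("mapreduce.job.end-notification.retry.attempts", 3);
        conf.setInt("mapreduce.job.end-notification.retry.interval", 2000);
        System.out.println(conf.get("mapreduce.job.end-notification.url"));
      }
    }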

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java Fri Nov 30 19:58:09 2012
@@ -535,8 +535,9 @@ public class HistoryFileManager extends 
     if (serialPart == null) {
       LOG.warn("Could not find serial portion from path: "
           + serialDirPath.toString() + ". Continuing with next");
+    } else {
+      serialNumberIndex.add(serialPart, timestampPart);
     }
-    serialNumberIndex.add(serialPart, timestampPart);
   }
 
   private void addDirectoryToJobListCache(Path path) throws IOException {
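
The fix above stops a null serialPart from reaching serialNumberIndex: the warning branch previously logged and then fell through to the unconditional add. The guard pattern, reduced to a sketch with invented types:

    import java.util.ArrayList;
    import java.util.List;

    public class GuardDemo {
      public static void main(String[] args) {
        List<String> index = new ArrayList<String>();
        String serialPart = null; // e.g. the path had no serial portion
        if (serialPart == null) {
          System.err.println("Could not find serial portion; skipping");
        } else {
          index.add(serialPart); // only reached with a usable value
        }
        System.out.println(index); // prints [] -- no null entry
      }
    }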

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java Fri Nov 30 19:58:09 2012
@@ -27,6 +27,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.StringUtils;
@@ -49,6 +51,8 @@ public class JobHistoryServer extends Co
    */
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
 
+  public static final long historyServerTimeStamp = System.currentTimeMillis();
+
   private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
   private HistoryContext historyContext;
   private HistoryClientService clientService;
@@ -106,6 +110,8 @@ public class JobHistoryServer extends Co
 
   @Override
   public void start() {
+    DefaultMetricsSystem.initialize("JobHistoryServer");
+    JvmMetrics.initSingleton("JobHistoryServer", null);
     try {
       jhsDTSecretManager.startThreads();
     } catch(IOException io) {
@@ -118,6 +124,7 @@ public class JobHistoryServer extends Co
   @Override
   public void stop() {
     jhsDTSecretManager.stopThreads();
+    DefaultMetricsSystem.shutdown();
     super.stop();
   }
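
With these hunks the history server stands up a metrics system and the stock JVM metrics source when it starts, and tears the metrics system down in stop(). The lifecycle pairing, sketched outside the server (requires hadoop-common on the classpath; the service name is illustrative):

    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.source.JvmMetrics;

    public class MetricsLifecycleDemo {
      public static void main(String[] args) throws InterruptedException {
        // Bring up the singleton metrics system under a service name and
        // register JVM-level metrics (heap, GC, thread counts, ...).
        DefaultMetricsSystem.initialize("DemoService");
        JvmMetrics.initSingleton("DemoService", null);
        try {
          Thread.sleep(1000); // stand-in for the service's running lifetime
        } finally {
          // Mirrors the stop() hunk: flush sinks and stop metrics threads
          // before the process exits.
          DefaultMetricsSystem.shutdown();
        }
      }
    }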
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java Fri Nov 30 19:58:09 2012
@@ -21,7 +21,9 @@ package org.apache.hadoop.mapreduce.v2.h
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
+import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
 
@@ -47,7 +49,9 @@ public class HsAboutPage extends HsView 
   @Override protected Class<? extends SubView> content() {
     HistoryInfo info = new HistoryInfo();
     info("History Server").
-      _("BuildVersion", info.getHadoopBuildVersion() + " on " + info.getHadoopVersionBuiltOn());
+      _("BuildVersion", info.getHadoopBuildVersion()
+        + " on " + info.getHadoopVersionBuiltOn()).
+      _("History Server started on", Times.format(info.getStartedOn()));
     return InfoBlock.class;
   }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java Fri Nov 30 19:58:09 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.h
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -37,7 +38,7 @@ import com.google.inject.Inject;
  */
 public class HsJobsBlock extends HtmlBlock {
   final AppContext appContext;
-  static final SimpleDateFormat dateFormat =
+  final SimpleDateFormat dateFormat =
     new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
 
   @Inject HsJobsBlock(AppContext appCtx) {
@@ -67,22 +68,36 @@ public class HsJobsBlock extends HtmlBlo
             th("Reduces Completed")._()._().
         tbody();
     LOG.info("Getting list of all Jobs.");
+    // Write all the data into a JavaScript array of arrays for JQuery
+    // DataTables to display
+    StringBuilder jobsTableData = new StringBuilder("[\n");
     for (Job j : appContext.getAllJobs().values()) {
       JobInfo job = new JobInfo(j);
-      tbody.
-        tr().
-          td(dateFormat.format(new Date(job.getStartTime()))).
-          td(dateFormat.format(new Date(job.getFinishTime()))).
-          td().a(url("job", job.getId()), job.getId())._().
-          td(job.getName()).
-          td(job.getUserName()).
-          td(job.getQueueName()).
-          td(job.getState()).
-          td(String.valueOf(job.getMapsTotal())).
-          td(String.valueOf(job.getMapsCompleted())).
-          td(String.valueOf(job.getReducesTotal())).
-          td(String.valueOf(job.getReducesCompleted()))._();
+      jobsTableData.append("[\"")
+      .append(dateFormat.format(new Date(job.getStartTime()))).append("\",\"")
+      .append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
+      .append("<a href='").append(url("job", job.getId())).append("'>")
+      .append(job.getId()).append("</a>\",\"")
+      .append(StringEscapeUtils.escapeHtml(job.getName()))
+      .append("\",\"")
+      .append(StringEscapeUtils.escapeHtml(job.getUserName()))
+      .append("\",\"")
+      .append(StringEscapeUtils.escapeHtml(job.getQueueName()))
+      .append("\",\"")
+      .append(job.getState()).append("\",\"")
+      .append(String.valueOf(job.getMapsTotal())).append("\",\"")
+      .append(String.valueOf(job.getMapsCompleted())).append("\",\"")
+      .append(String.valueOf(job.getReducesTotal())).append("\",\"")
+      .append(String.valueOf(job.getReducesCompleted())).append("\"],\n");
     }
+
+    //Remove the last comma and close off the array of arrays
+    if(jobsTableData.charAt(jobsTableData.length() - 2) == ',') {
+      jobsTableData.delete(jobsTableData.length()-2, jobsTableData.length()-1);
+    }
+    jobsTableData.append("]");
+    html.script().$type("text/javascript").
+    _("var jobsTableData=" + jobsTableData)._();
     tbody._().
     tfoot().
       tr().
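
The rewrite above moves the job rows out of server-rendered table cells and into a jobsTableData JavaScript array that JQuery DataTables renders client-side; since job name, user and queue are user-supplied, they pass through StringEscapeUtils.escapeHtml before being embedded in the page. A small sketch of why that matters (the job name is a deliberately hostile example):

    import org.apache.commons.lang.StringEscapeUtils;

    public class EscapeDemo {
      public static void main(String[] args) {
        // Unescaped, this name would inject live markup into the page
        // that evaluates the jobsTableData array.
        String jobName = "<script>alert('boom')</script>";
        StringBuilder row = new StringBuilder("[\"");
        row.append(StringEscapeUtils.escapeHtml(jobName)).append("\"]");
        // -> ["&lt;script&gt;alert('boom')&lt;/script&gt;"]
        System.out.println(row);
      }
    }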

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java Fri Nov 30 19:58:09 2012
@@ -70,7 +70,6 @@ public class HsNavBlock extends HtmlBloc
           li().a("/conf", "Configuration")._().
           li().a("/logs", "Local logs")._().
           li().a("/stacks", "Server stacks")._().
-          li().a("/metrics", "Server metrics")._()._()._().
-    div("#themeswitcher")._();
+          li().a("/metrics", "Server metrics")._()._()._();
   }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Fri Nov 30 19:58:09 2012
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -100,6 +99,10 @@ public class HsTasksBlock extends HtmlBl
     theadRow.th("Elapsed Time"); //Attempt
 
     TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
+
+    // Write all the data into a JavaScript array of arrays for JQuery
+    // DataTables to display
+    StringBuilder tasksTableData = new StringBuilder("[\n");
     for (Task task : app.getJob().getTasks().values()) {
       if (type != null && task.getType() != type) {
         continue;
@@ -137,55 +140,36 @@ public class HsTasksBlock extends HtmlBl
         attemptFinishTime = ta.getFinishTime();
         attemptElapsed = ta.getElapsedTime();
       }
+      tasksTableData.append("[\"")
+      .append("<a href='" + url("task", tid)).append("'>")
+      .append(tid).append("</a>\",\"")
+      .append(info.getState()).append("\",\"")
+      .append(startTime).append("\",\"")
+      .append(finishTime).append("\",\"")
+      .append(elapsed).append("\",\"")
+      .append(attemptStartTime).append("\",\"");
 
-      TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
-      row.
-          td().
-            br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
-            a(url("task", tid), tid)._().
-          td(info.getState()).
-          td().
-            br().$title(String.valueOf(startTime))._().
-            _(Times.format(startTime))._().
-          td().
-            br().$title(String.valueOf(finishTime))._().
-            _(Times.format(finishTime))._().
-          td().
-            br().$title(String.valueOf(elapsed))._().
-            _(formatTime(elapsed))._().
-          td().
-            br().$title(String.valueOf(attemptStartTime))._().
-            _(Times.format(attemptStartTime))._();
       if(type == TaskType.REDUCE) {
-        row.td().
-          br().$title(String.valueOf(shuffleFinishTime))._().
-          _(Times.format(shuffleFinishTime))._();
-        row.td().
-        br().$title(String.valueOf(sortFinishTime))._().
-        _(Times.format(sortFinishTime))._();
+        tasksTableData.append(shuffleFinishTime).append("\",\"")
+        .append(sortFinishTime).append("\",\"");
       }
-      row.
-          td().
-            br().$title(String.valueOf(attemptFinishTime))._().
-            _(Times.format(attemptFinishTime))._();
-
+      tasksTableData.append(attemptFinishTime).append("\",\"");
       if(type == TaskType.REDUCE) {
-        row.td().
-          br().$title(String.valueOf(elapsedShuffleTime))._().
-        _(formatTime(elapsedShuffleTime))._();
-        row.td().
-        br().$title(String.valueOf(elapsedSortTime))._().
-      _(formatTime(elapsedSortTime))._();
-        row.td().
-          br().$title(String.valueOf(elapsedReduceTime))._().
-        _(formatTime(elapsedReduceTime))._();
+        tasksTableData.append(elapsedShuffleTime).append("\",\"")
+        .append(elapsedSortTime).append("\",\"")
+        .append(elapsedReduceTime).append("\",\"");
       }
-
-      row.td().
-        br().$title(String.valueOf(attemptElapsed))._().
-        _(formatTime(attemptElapsed))._();
-      row._();
+      tasksTableData.append(attemptElapsed).append("\"],\n");
     }
+    //Remove the last comma and close off the array of arrays
+    if(tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
+      tasksTableData.delete(
+        tasksTableData.length()-2, tasksTableData.length()-1);
+    }
+    tasksTableData.append("]");
+    html.script().$type("text/javascript").
+    _("var tasksTableData=" + tasksTableData)._();
+    
     TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
     footRow.th().input("search_init").$type(InputType.text).$name("task")
         .$value("ID")._()._().th().input("search_init").$type(InputType.text)
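
HsTasksBlock gets the same treatment as HsJobsBlock: rows accumulate in a tasksTableData array whose entries end in "],\n" plus the row separator, so after the loop the stray comma sits two characters from the end and is snipped before the closing bracket. The trimming idiom on its own, with invented row data:

    public class TrailingCommaDemo {
      public static void main(String[] args) {
        StringBuilder data = new StringBuilder("[\n");
        for (String id : new String[] { "task_1", "task_2" }) {
          data.append("[\"").append(id).append("\"],\n");
        }
        // Every row ends "],\n", so the comma is at length - 2; delete it
        // but keep the trailing newline, then close the outer array.
        if (data.charAt(data.length() - 2) == ',') {
          data.delete(data.length() - 2, data.length() - 1);
        }
        data.append("]");
        System.out.println(data); // valid JavaScript array literal
      }
    }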

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java Fri Nov 30 19:58:09 2012
@@ -67,18 +67,32 @@ public class HsTasksPage extends HsView 
       type = MRApps.taskType(symbol);
     }
     StringBuilder b = tableInit().
-    append(",aoColumnDefs:[");
-    b.append("{'sType':'title-numeric', 'aTargets': [ 0, 4");
+    append(", 'aaData': tasksTableData");
+    b.append(", bDeferRender: true");
+    b.append(", bProcessing: true");
+
+    b.append("\n, aoColumnDefs: [\n");
+    b.append("{'sType':'numeric', 'aTargets': [ 0 ]");
+    b.append(", 'mRender': parseHadoopID }");
+
+    b.append(", {'sType':'numeric', 'aTargets': [ 4");
     if(type == TaskType.REDUCE) {
       b.append(", 9, 10, 11, 12");
     } else { //MAP
       b.append(", 7");
     }
-    b.append(" ] }]");
+    b.append(" ], 'mRender': renderHadoopElapsedTime }");
 
-    // Sort by id upon page load
-    b.append(", aaSorting: [[0, 'asc']]");
+    b.append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5");
+    if(type == TaskType.REDUCE) {
+      b.append(", 6, 7, 8");
+    } else { //MAP
+      b.append(", 6");
+    }
+    b.append(" ], 'mRender': renderHadoopDate }]");
 
+    // Sort by id upon page load
+    b.append("\n, aaSorting: [[0, 'asc']]");
     b.append("}");
     return b.toString();
   }
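
The init-string changes wire the page to the client-side array: aaData points DataTables at tasksTableData, bDeferRender/bProcessing avoid materializing every row up front, and the mRender hooks (parseHadoopID, renderHadoopElapsedTime, renderHadoopDate, all referenced by the patch) format the raw values in the browser. For orientation, an approximation of the string the MAP branch produces (reconstructed from the hunk, not copied from a running server):

    public class TasksInitDemo {
      public static void main(String[] args) {
        // Column indices follow the hunk's MAP branch: 0 = task id,
        // 4 and 7 = elapsed times, 2/3/5/6 = timestamps.
        String init = "{ 'aaData': tasksTableData"
            + ", bDeferRender: true, bProcessing: true"
            + ", aoColumnDefs: ["
            + "{'sType':'numeric', 'aTargets': [ 0 ], 'mRender': parseHadoopID }"
            + ", {'sType':'numeric', 'aTargets': [ 4, 7 ],"
            + " 'mRender': renderHadoopElapsedTime }"
            + ", {'sType':'numeric', 'aTargets': [ 2, 3, 5, 6 ],"
            + " 'mRender': renderHadoopDate }]"
            + ", aaSorting: [[0, 'asc']] }";
        System.out.println(init);
      }
    }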


