hadoop-mapreduce-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1532910 [2/3] - in /hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project: ./ bin/ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ hadoop-mapreduce-client/hadoop-mapreduce-cli...
Date: Wed, 16 Oct 2013 21:07:41 GMT
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Wed Oct 16 21:07:28 2013
@@ -880,8 +880,10 @@ public class TestRMContainerAllocator {
     dispatcher.await();
 
     assigned = allocator.schedule();
+    Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
     dispatcher.await();
-    Assert.assertEquals("No of assignments must be 0", 0, assigned.size());    
+    Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
+    assertBlacklistAdditionsAndRemovals(2, 0, rm);
 
     // mark h1/h2 as bad nodes
     nodeManager1.nodeHeartbeat(false);
@@ -890,12 +892,14 @@ public class TestRMContainerAllocator {
 
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());    
 
     nodeManager3.nodeHeartbeat(true); // Node heartbeat
     dispatcher.await();
-    assigned = allocator.schedule();    
+    assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
         
     Assert.assertTrue("No of assignments must be 3", assigned.size() == 3);
     
@@ -948,7 +952,7 @@ public class TestRMContainerAllocator {
     // Known=1, blacklisted=0, ignore should be false - assign first container
     assigned =
         getContainerOnHost(jobId, 1, 1024, new String[] { "h1" },
-            nodeManagers[0], dispatcher, allocator);
+            nodeManagers[0], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     LOG.info("Failing container _1 on H1 (Node should be blacklisted and"
@@ -958,44 +962,52 @@ public class TestRMContainerAllocator {
     allocator.sendFailure(f1);
 
     // Test single node.
+    // Known=1, blacklisted=1, ignore should be true - assign 0
+    // Because makeRemoteRequest will not be aware of it until next call
+    // The current call will send blacklisted node "h1" to RM
+    assigned =
+        getContainerOnHost(jobId, 2, 1024, new String[] { "h1" },
+            nodeManagers[0], dispatcher, allocator, 1, 0, 0, 1, rm);
+    Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
+
     // Known=1, blacklisted=1, ignore should be true - assign 1
     assigned =
         getContainerOnHost(jobId, 2, 1024, new String[] { "h1" },
-            nodeManagers[0], dispatcher, allocator);
+            nodeManagers[0], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     nodeManagers[nmNum] = registerNodeManager(nmNum++, rm, dispatcher);
     // Known=2, blacklisted=1, ignore should be true - assign 1 anyway.
     assigned =
         getContainerOnHost(jobId, 3, 1024, new String[] { "h2" },
-            nodeManagers[1], dispatcher, allocator);
+            nodeManagers[1], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     nodeManagers[nmNum] = registerNodeManager(nmNum++, rm, dispatcher);
     // Known=3, blacklisted=1, ignore should be true - assign 1 anyway.
     assigned =
         getContainerOnHost(jobId, 4, 1024, new String[] { "h3" },
-            nodeManagers[2], dispatcher, allocator);
+            nodeManagers[2], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     // Known=3, blacklisted=1, ignore should be true - assign 1
     assigned =
         getContainerOnHost(jobId, 5, 1024, new String[] { "h1" },
-            nodeManagers[0], dispatcher, allocator);
+            nodeManagers[0], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     nodeManagers[nmNum] = registerNodeManager(nmNum++, rm, dispatcher);
     // Known=4, blacklisted=1, ignore should be false - assign 1 anyway
     assigned =
         getContainerOnHost(jobId, 6, 1024, new String[] { "h4" },
-            nodeManagers[3], dispatcher, allocator);
+            nodeManagers[3], dispatcher, allocator, 0, 0, 1, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     // Test blacklisting re-enabled.
     // Known=4, blacklisted=1, ignore should be false - no assignment on h1
     assigned =
         getContainerOnHost(jobId, 7, 1024, new String[] { "h1" },
-            nodeManagers[0], dispatcher, allocator);
+            nodeManagers[0], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
     // RMContainerRequestor would have created a replacement request.
 
@@ -1004,17 +1016,24 @@ public class TestRMContainerAllocator {
     allocator.sendFailure(f2);
 
     // Test ignore blacklisting re-enabled
+    // Known=4, blacklisted=2, ignore should be true. Should assign 0
+    // container for the same reason above.
+    assigned =
+        getContainerOnHost(jobId, 8, 1024, new String[] { "h1" },
+            nodeManagers[0], dispatcher, allocator, 1, 0, 0, 2, rm);
+    Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
+
     // Known=4, blacklisted=2, ignore should be true. Should assign 2
     // containers.
     assigned =
         getContainerOnHost(jobId, 8, 1024, new String[] { "h1" },
-            nodeManagers[0], dispatcher, allocator);
+            nodeManagers[0], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 2", 2, assigned.size());
 
     // Known=4, blacklisted=2, ignore should be true.
     assigned =
         getContainerOnHost(jobId, 9, 1024, new String[] { "h2" },
-            nodeManagers[1], dispatcher, allocator);
+            nodeManagers[1], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
 
     // Test blacklist while ignore blacklisting enabled
@@ -1025,7 +1044,7 @@ public class TestRMContainerAllocator {
     // Known=5, blacklisted=3, ignore should be true.
     assigned =
         getContainerOnHost(jobId, 10, 1024, new String[] { "h3" },
-            nodeManagers[2], dispatcher, allocator);
+            nodeManagers[2], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
     
     // Assign on 5 more nodes - to re-enable blacklisting
@@ -1034,14 +1053,14 @@ public class TestRMContainerAllocator {
       assigned =
           getContainerOnHost(jobId, 11 + i, 1024,
               new String[] { String.valueOf(5 + i) }, nodeManagers[4 + i],
-              dispatcher, allocator);
+              dispatcher, allocator, 0, 0, (i == 4 ? 3 : 0), 0, rm);
       Assert.assertEquals("No of assignments must be 1", 1, assigned.size());
     }
 
     // Test h3 (blacklisted while ignoring blacklisting) is blacklisted.
     assigned =
         getContainerOnHost(jobId, 20, 1024, new String[] { "h3" },
-            nodeManagers[2], dispatcher, allocator);
+            nodeManagers[2], dispatcher, allocator, 0, 0, 0, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
   }
 
@@ -1055,7 +1074,9 @@ public class TestRMContainerAllocator {
   private
       List<TaskAttemptContainerAssignedEvent> getContainerOnHost(JobId jobId,
           int taskAttemptId, int memory, String[] hosts, MockNM mockNM,
-          DrainDispatcher dispatcher, MyContainerAllocator allocator)
+          DrainDispatcher dispatcher, MyContainerAllocator allocator,
+          int expectedAdditions1, int expectedRemovals1,
+          int expectedAdditions2, int expectedRemovals2, MyResourceManager rm)
           throws Exception {
     ContainerRequestEvent reqEvent =
         createReq(jobId, taskAttemptId, memory, hosts);
@@ -1064,6 +1085,8 @@ public class TestRMContainerAllocator {
     // Send the request to the RM
     List<TaskAttemptContainerAssignedEvent> assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(
+        expectedAdditions1, expectedRemovals1, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
 
     // Heartbeat from the required nodeManager
@@ -1072,6 +1095,8 @@ public class TestRMContainerAllocator {
 
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(
+        expectedAdditions2, expectedRemovals2, rm);
     return assigned;
   }
  
@@ -1137,6 +1162,7 @@ public class TestRMContainerAllocator {
     LOG.info("RM Heartbeat (To process the scheduled containers)");
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
     Assert.assertEquals("No of assignments must be 1", 1, assigned.size());    
     
     LOG.info("Failing container _1 on H1 (should blacklist the node)");
@@ -1153,6 +1179,7 @@ public class TestRMContainerAllocator {
     //Update the Scheduler with the new requests.
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(1, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
 
     // send another request with different resource and priority
@@ -1171,6 +1198,7 @@ public class TestRMContainerAllocator {
     LOG.info("RM Heartbeat (To process the scheduled containers)");
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());    
     
     //RMContainerAllocator gets assigned a p:5 on a blacklisted node.
@@ -1179,6 +1207,7 @@ public class TestRMContainerAllocator {
     LOG.info("RM Heartbeat (To process the re-scheduled containers)");
     assigned = allocator.schedule();
     dispatcher.await();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
     Assert.assertEquals("No of assignments must be 0", 0, assigned.size());
     
     //Hearbeat from H3 to schedule on this host.
@@ -1188,6 +1217,7 @@ public class TestRMContainerAllocator {
     
     LOG.info("RM Heartbeat (To process the re-scheduled containers for H3)");
     assigned = allocator.schedule();
+    assertBlacklistAdditionsAndRemovals(0, 0, rm);
     dispatcher.await();
      
     // For debugging
@@ -1205,7 +1235,15 @@ public class TestRMContainerAllocator {
           + " host not correct", "h3", assig.getContainer().getNodeId().getHost());
     }
   }
-  
+
+  private static void assertBlacklistAdditionsAndRemovals(
+      int expectedAdditions, int expectedRemovals, MyResourceManager rm) {
+    Assert.assertEquals(expectedAdditions,
+        rm.getMyFifoScheduler().lastBlacklistAdditions.size());
+    Assert.assertEquals(expectedRemovals,
+        rm.getMyFifoScheduler().lastBlacklistRemovals.size());
+  }
+
   private static class MyFifoScheduler extends FifoScheduler {
 
     public MyFifoScheduler(RMContext rmContext) {
@@ -1220,6 +1258,8 @@ public class TestRMContainerAllocator {
     }
     
     List<ResourceRequest> lastAsk = null;
+    List<String> lastBlacklistAdditions;
+    List<String> lastBlacklistRemovals;
     
     // override this to copy the objects otherwise FifoScheduler updates the
     // numContainers in same objects as kept by RMContainerAllocator
@@ -1236,6 +1276,8 @@ public class TestRMContainerAllocator {
         askCopy.add(reqCopy);
       }
       lastAsk = ask;
+      lastBlacklistAdditions = blacklistAdditions;
+      lastBlacklistRemovals = blacklistRemovals;
       return super.allocate(
           applicationAttemptId, askCopy, release, 
           blacklistAdditions, blacklistRemovals);

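[Editor's sketch] The hunks above thread expected blacklist additions/removals through the test helper and read them back from MyFifoScheduler, which records the arguments of the most recent allocate() call. A minimal, self-contained sketch of that capture-and-assert pattern; the types and the simplified signature below are hypothetical stand-ins, not the Hadoop FifoScheduler API:

import java.util.Collections;
import java.util.List;

// Hypothetical stand-in for the real scheduler; only the capture matters.
class BaseScheduler {
  Object allocate(List<String> ask, List<String> additions, List<String> removals) {
    return new Object(); // a real scheduler would return an Allocation
  }
}

class CapturingScheduler extends BaseScheduler {
  List<String> lastBlacklistAdditions = Collections.emptyList();
  List<String> lastBlacklistRemovals = Collections.emptyList();

  @Override
  Object allocate(List<String> ask, List<String> additions, List<String> removals) {
    // Record the most recent blacklist arguments so a test helper can assert
    // on their sizes after each schedule()/heartbeat round trip.
    lastBlacklistAdditions = additions;
    lastBlacklistRemovals = removals;
    return super.allocate(ask, additions, removals);
  }
}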
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java Wed Oct 16 21:07:28 2013
@@ -869,7 +869,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public boolean safeToReportTerminationToUser() {
+    public boolean hasSuccessfullyUnregistered() {
       // bogus - Not Required
       return true;
     }

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java Wed Oct 16 21:07:28 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.a
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -36,18 +37,17 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
-import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
+import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -57,7 +57,7 @@ import org.apache.hadoop.service.Service
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -75,7 +75,44 @@ import org.junit.Test;
    private Path stagingJobPath = new Path(stagingJobDir);
    private final static RecordFactory recordFactory = RecordFactoryProvider.
        getRecordFactory(null);
-   
+
+   @Test
+   public void testDeletionofStagingOnUnregistrationFailure()
+       throws IOException {
+     testDeletionofStagingOnUnregistrationFailure(2, false);
+     testDeletionofStagingOnUnregistrationFailure(1, true);
+   }
+
+   @SuppressWarnings("resource")
+   private void testDeletionofStagingOnUnregistrationFailure(
+       int maxAttempts, boolean shouldHaveDeleted) throws IOException {
+     conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, stagingJobDir);
+     fs = mock(FileSystem.class);
+     when(fs.delete(any(Path.class), anyBoolean())).thenReturn(true);
+     //Staging Dir exists
+     String user = UserGroupInformation.getCurrentUser().getShortUserName();
+     Path stagingDir = MRApps.getStagingAreaDir(conf, user);
+     when(fs.exists(stagingDir)).thenReturn(true);
+     ApplicationId appId = ApplicationId.newInstance(0, 1);
+     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
+     JobId jobid = recordFactory.newRecordInstance(JobId.class);
+     jobid.setAppId(appId);
+     TestMRApp appMaster = new TestMRApp(attemptId, null,
+         JobStateInternal.RUNNING, maxAttempts);
+     appMaster.crushUnregistration = true;
+     appMaster.init(conf);
+     appMaster.start();
+     appMaster.shutDownJob();
+     ((RunningAppContext) appMaster.getContext()).computeIsLastAMRetry();
+     if (shouldHaveDeleted) {
+       Assert.assertEquals(new Boolean(true), appMaster.isLastAMRetry());
+       verify(fs).delete(stagingJobPath, true);
+     } else {
+       Assert.assertEquals(new Boolean(false), appMaster.isLastAMRetry());
+       verify(fs, never()).delete(stagingJobPath, true);
+     }
+   }
+
    @Test
    public void testDeletionofStaging() throws IOException {
      conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, stagingJobDir);
@@ -204,6 +241,7 @@ import org.junit.Test;
      ContainerAllocator allocator;
      boolean testIsLastAMRetry = false;
      JobStateInternal jobStateInternal;
+     boolean crushUnregistration = false;
 
      public TestMRApp(ApplicationAttemptId applicationAttemptId, 
          ContainerAllocator allocator, int maxAppAttempts) {
@@ -211,6 +249,7 @@ import org.junit.Test;
            applicationAttemptId, 1), "testhost", 2222, 3333,
            System.currentTimeMillis(), maxAppAttempts);
        this.allocator = allocator;
+       this.successfullyUnregistered.set(true);
      }
 
      public TestMRApp(ApplicationAttemptId applicationAttemptId,
@@ -229,7 +268,11 @@ import org.junit.Test;
      protected ContainerAllocator createContainerAllocator(
          final ClientService clientService, final AppContext context) {
        if(allocator == null) {
-         return super.createContainerAllocator(clientService, context);
+         if (crushUnregistration) {
+           return new CustomContainerAllocator(context);
+         } else {
+           return super.createContainerAllocator(clientService, context);
+         }
        }
        return allocator;
      }
@@ -280,6 +323,41 @@ import org.junit.Test;
      public boolean getTestIsLastAMRetry(){
        return testIsLastAMRetry;
      }
+
+    private class CustomContainerAllocator extends RMCommunicator
+        implements ContainerAllocator {
+
+      public CustomContainerAllocator(AppContext context) {
+        super(null, context);
+      }
+
+      @Override
+      public void serviceInit(Configuration conf) {
+      }
+
+      @Override
+      public void serviceStart() {
+      }
+
+      @Override
+      public void serviceStop() {
+        unregister();
+      }
+
+      @Override
+      protected void doUnregistration()
+          throws YarnException, IOException, InterruptedException {
+        throw new YarnException("test exception");
+      }
+
+      @Override
+      protected void heartbeat() throws Exception {
+      }
+
+      @Override
+      public void handle(ContainerAllocatorEvent event) {
+      }
+    }
    }
 
   private final class MRAppTestCleanup extends MRApp {

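[Editor's sketch] The CustomContainerAllocator above injects a failure by throwing from the unregistration hook, so the AM's "successfully unregistered" flag is never set and a non-final attempt skips staging cleanup. A self-contained sketch of that failure-injection shape, using plain Java stand-ins rather than the RMCommunicator API:

import java.util.concurrent.atomic.AtomicBoolean;

class Communicator {
  protected final AtomicBoolean successfullyUnregistered = new AtomicBoolean(false);

  protected void doUnregistration() throws Exception {
    // the real implementation unregisters from the ResourceManager
  }

  final void unregister() {
    try {
      doUnregistration();
      successfullyUnregistered.set(true);
    } catch (Exception e) {
      // flag stays false: the AM must not report terminal state to the user
    }
  }
}

class CrashingCommunicator extends Communicator {
  @Override
  protected void doUnregistration() throws Exception {
    throw new Exception("test exception"); // injected failure, as in the test
  }
}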
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Wed Oct 16 21:07:28 2013
@@ -275,7 +275,7 @@ public class TestJobImpl {
 
     AppContext mockContext = mock(AppContext.class);
     when(mockContext.isLastAMRetry()).thenReturn(true);
-    when(mockContext.safeToReportTerminationToUser()).thenReturn(false);
+    when(mockContext.hasSuccessfullyUnregistered()).thenReturn(false);
     JobImpl job = createRunningStubbedJob(conf, dispatcher, 2, mockContext);
     completeJobTasks(job);
     assertJobState(job, JobStateInternal.COMMITTING);
@@ -285,7 +285,7 @@ public class TestJobImpl {
     assertJobState(job, JobStateInternal.REBOOT);
     // return the external state as ERROR since this is last retry.
     Assert.assertEquals(JobState.RUNNING, job.getState());
-    when(mockContext.safeToReportTerminationToUser()).thenReturn(true);
+    when(mockContext.hasSuccessfullyUnregistered()).thenReturn(true);
     Assert.assertEquals(JobState.ERROR, job.getState());
 
     dispatcher.stop();
@@ -594,7 +594,7 @@ public class TestJobImpl {
         new JobDiagnosticsUpdateEvent(jobId, diagMsg);
     MRAppMetrics mrAppMetrics = MRAppMetrics.create();
     AppContext mockContext = mock(AppContext.class);
-    when(mockContext.safeToReportTerminationToUser()).thenReturn(true);
+    when(mockContext.hasSuccessfullyUnregistered()).thenReturn(true);
     JobImpl job = new JobImpl(jobId, Records
         .newRecord(ApplicationAttemptId.class), new Configuration(),
         mock(EventHandler.class),
@@ -705,7 +705,7 @@ public class TestJobImpl {
     commitHandler.start();
 
     AppContext mockContext = mock(AppContext.class);
-    when(mockContext.safeToReportTerminationToUser()).thenReturn(false);
+    when(mockContext.hasSuccessfullyUnregistered()).thenReturn(false);
     JobImpl job = createStubbedJob(conf, dispatcher, 2, mockContext);
     JobId jobId = job.getID();
     job.handle(new JobEvent(jobId, JobEventType.JOB_INIT));
@@ -722,7 +722,7 @@ public class TestJobImpl {
     job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE));
     assertJobState(job, JobStateInternal.FAILED);
     Assert.assertEquals(JobState.RUNNING, job.getState());
-    when(mockContext.safeToReportTerminationToUser()).thenReturn(true);
+    when(mockContext.hasSuccessfullyUnregistered()).thenReturn(true);
     Assert.assertEquals(JobState.FAILED, job.getState());
 
     dispatcher.stop();
@@ -762,7 +762,7 @@ public class TestJobImpl {
     JobId jobId = TypeConverter.toYarn(jobID);
     if (appContext == null) {
       appContext = mock(AppContext.class);
-      when(appContext.safeToReportTerminationToUser()).thenReturn(true);
+      when(appContext.hasSuccessfullyUnregistered()).thenReturn(true);
     }
     StubbedJob job = new StubbedJob(jobId,
         ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java Wed Oct 16 21:07:28 2013
@@ -89,6 +89,10 @@ public class TestLocalContainerAllocator
     }
 
     @Override
+    protected void unregister() {
+    }
+
+    @Override
     protected void startAllocatorThread() {
       allocatorThread = new Thread();
     }

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java Wed Oct 16 21:07:28 2013
@@ -18,15 +18,9 @@
 
 package org.apache.hadoop.mapreduce.v2.jobhistory;
 
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Stores Job History configuration keys that can be set by administrators of
@@ -130,6 +124,12 @@ public class JHAdminConfig {
   public static final String MR_HISTORY_PRINCIPAL = 
     MR_HISTORY_PREFIX + "principal";
   
+  /** To enable https in MR history server */
+  public static final String MR_HS_HTTP_POLICY = MR_HISTORY_PREFIX
+      + "http.policy";
+  public static String DEFAULT_MR_HS_HTTP_POLICY =
+          CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
+  
   /**The address the history server webapp is on.*/
   public static final String MR_HISTORY_WEBAPP_ADDRESS =
     MR_HISTORY_PREFIX + "webapp.address";
@@ -188,43 +188,11 @@ public class JHAdminConfig {
   /** Whether to use fixed ports with the minicluster. */
   public static final String MR_HISTORY_MINICLUSTER_FIXED_PORTS = MR_HISTORY_PREFIX
        + "minicluster.fixed.ports";
-
+  
   /**
    * Default is false to be able to run tests concurrently without port
    * conflicts.
    */
   public static boolean DEFAULT_MR_HISTORY_MINICLUSTER_FIXED_PORTS = false;
 
-  public static String getResolvedMRHistoryWebAppURLWithoutScheme(
-      Configuration conf) {
-    InetSocketAddress address = null;
-    if (HttpConfig.isSecure()) {
-      address =
-          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
-    } else {
-      address =
-          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);    }
-    address = NetUtils.getConnectAddress(address);
-    StringBuffer sb = new StringBuffer();
-    InetAddress resolved = address.getAddress();
-    if (resolved == null || resolved.isAnyLocalAddress() || 
-        resolved.isLoopbackAddress()) {
-      String lh = address.getHostName();
-      try {
-        lh = InetAddress.getLocalHost().getCanonicalHostName();
-      } catch (UnknownHostException e) {
-        //Ignore and fallback.
-      }
-      sb.append(lh);
-    } else {
-      sb.append(address.getHostName());
-    }
-    sb.append(":").append(address.getPort());
-    return sb.toString();
-  }
-
 }

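[Editor's sketch] A short usage sketch for the key added above, assuming only the constants visible in this diff; an unset key falls back to the HTTP_ONLY default imported from CommonConfigurationKeysPublic:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

public class HsHttpPolicySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Unset keys fall back to DEFAULT_MR_HS_HTTP_POLICY (HTTP_ONLY).
    String policy = conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY);
    System.out.println("history server http policy: " + policy);
  }
}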
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java Wed Oct 16 21:07:28 2013
@@ -20,11 +20,7 @@ package org.apache.hadoop.mapreduce.v2.j
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
 import java.util.Calendar;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -45,13 +41,8 @@ import org.apache.hadoop.mapreduce.MRJob
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
@@ -126,9 +117,6 @@ public class JobHistoryUtils {
   public static final Pattern TIMESTAMP_DIR_PATTERN = Pattern.compile(TIMESTAMP_DIR_REGEX);
   private static final String TIMESTAMP_DIR_FORMAT = "%04d" + File.separator + "%02d" + File.separator + "%02d";
 
-  private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
-  private static final Joiner JOINER = Joiner.on("");
-
   private static final PathFilter CONF_FILTER = new PathFilter() {
     @Override
     public boolean accept(Path path) {
@@ -497,36 +485,6 @@ public class JobHistoryUtils {
     return result;
   }
 
-  public static String getHistoryUrl(Configuration conf, ApplicationId appId) 
-       throws UnknownHostException {
-  //construct the history url for job
-    String addr = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
-    Iterator<String> it = ADDR_SPLITTER.split(addr).iterator();
-    it.next(); // ignore the bind host
-    String port = it.next();
-    // Use hs address to figure out the host for webapp
-    addr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
-    String host = ADDR_SPLITTER.split(addr).iterator().next();
-    String hsAddress = JOINER.join(host, ":", port);
-    InetSocketAddress address = NetUtils.createSocketAddr(
-      hsAddress, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT,
-      JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
-    StringBuffer sb = new StringBuffer();
-    if (address.getAddress().isAnyLocalAddress() || 
-        address.getAddress().isLoopbackAddress()) {
-      sb.append(InetAddress.getLocalHost().getCanonicalHostName());
-    } else {
-      sb.append(address.getHostName());
-    }
-    sb.append(":").append(address.getPort());
-    sb.append("/jobhistory/job/");
-    JobID jobId = TypeConverter.fromYarn(appId);
-    sb.append(jobId.toString());
-    return sb.toString();
-  }
-
   public static Path getPreviousJobHistoryPath(
       Configuration conf, ApplicationAttemptId applicationAttemptId)
       throws IOException {

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Wed Oct 16 21:07:28 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.u
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.security.AccessController;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
@@ -48,6 +49,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.ContainerLogAppender;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -133,6 +135,30 @@ public class MRApps extends Apps {
     return TaskAttemptStateUI.valueOf(attemptStateStr);
   }
 
+  // gets the base name of the MapReduce framework or null if no
+  // framework was configured
+  private static String getMRFrameworkName(Configuration conf) {
+    String frameworkName = null;
+    String framework =
+        conf.get(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, "");
+    if (!framework.isEmpty()) {
+      URI uri;
+      try {
+        uri = new URI(framework);
+      } catch (URISyntaxException e) {
+        throw new IllegalArgumentException("Unable to parse '" + framework
+            + "' as a URI, check the setting for "
+            + MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, e);
+      }
+
+      frameworkName = uri.getFragment();
+      if (frameworkName == null) {
+        frameworkName = new Path(uri).getName();
+      }
+    }
+    return frameworkName;
+  }
+
   private static void setMRFrameworkClasspath(
       Map<String, String> environment, Configuration conf) throws IOException {
     // Propagate the system classpath when using the mini cluster
@@ -141,18 +167,34 @@ public class MRApps extends Apps {
           System.getProperty("java.class.path"));
     }
 
-    // Add standard Hadoop classes
-    for (String c : conf.getStrings(
-        YarnConfiguration.YARN_APPLICATION_CLASSPATH,
-        YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
-      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
-          .trim());
+    // if the framework is specified then only use the MR classpath
+    String frameworkName = getMRFrameworkName(conf);
+    if (frameworkName == null) {
+      // Add standard Hadoop classes
+      for (String c : conf.getStrings(
+          YarnConfiguration.YARN_APPLICATION_CLASSPATH,
+          YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
+        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
+            .trim());
+      }
     }
+
+    boolean foundFrameworkInClasspath = (frameworkName == null);
     for (String c : conf.getStrings(
         MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
-        MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH)) {
+        StringUtils.getStrings(
+            MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH))){
       Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
           .trim());
+      if (!foundFrameworkInClasspath) {
+        foundFrameworkInClasspath = c.contains(frameworkName);
+      }
+    }
+
+    if (!foundFrameworkInClasspath) {
+      throw new IllegalArgumentException(
+          "Could not locate MapReduce framework name '" + frameworkName
+          + "' in " + MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH);
     }
     // TODO: Remove duplicates.
   }

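[Editor's sketch] The naming rule getMRFrameworkName() applies is: the URI fragment if present, otherwise the archive's file name. A runnable illustration of the same rule with a hypothetical framework path:

import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.fs.Path;

public class FrameworkNameSketch {
  static String frameworkName(URI uri) {
    // Same rule as getMRFrameworkName(): fragment wins, else last path component.
    return uri.getFragment() != null ? uri.getFragment() : new Path(uri).getName();
  }

  public static void main(String[] args) throws URISyntaxException {
    System.out.println(frameworkName(
        new URI("hdfs:///apps/mr/mr-framework.tar.gz#mrframework"))); // mrframework
    System.out.println(frameworkName(
        new URI("hdfs:///apps/mr/mr-framework.tar.gz")));             // mr-framework.tar.gz
  }
}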
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java Wed Oct 16 21:07:28 2013
@@ -191,7 +191,9 @@ public class TestMRApps {
     }
     assertTrue(environment.get("CLASSPATH").contains(yarnAppClasspath));
     String mrAppClasspath = 
-        job.getConfiguration().get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH);
+        job.getConfiguration().get(
+            MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
+            MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH);
     if (mrAppClasspath != null) {
       mrAppClasspath = mrAppClasspath.replaceAll(",\\s*", File.pathSeparator)
         .trim();
@@ -282,7 +284,46 @@ public class TestMRApps {
     assertEquals("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app"
       + " classpath!", expectedAppClasspath, appCp);
   }
-  
+
+  @Test (timeout = 3000000)
+  public void testSetClasspathWithFramework() throws IOException {
+    final String FRAMEWORK_NAME = "some-framework-name";
+    final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME;
+    Configuration conf = new Configuration();
+    conf.set(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, FRAMEWORK_PATH);
+    Map<String, String> env = new HashMap<String, String>();
+    try {
+      MRApps.setClasspath(env, conf);
+      fail("Failed to catch framework path set without classpath change");
+    } catch (IllegalArgumentException e) {
+      assertTrue("Unexpected IllegalArgumentException",
+          e.getMessage().contains("Could not locate MapReduce framework name '"
+              + FRAMEWORK_NAME + "'"));
+    }
+
+    env.clear();
+    final String FRAMEWORK_CLASSPATH = FRAMEWORK_NAME + "/*.jar";
+    conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, FRAMEWORK_CLASSPATH);
+    MRApps.setClasspath(env, conf);
+    final String stdClasspath = StringUtils.join(File.pathSeparator,
+        Arrays.asList("job.jar/job.jar", "job.jar/classes/", "job.jar/lib/*",
+            ApplicationConstants.Environment.PWD.$() + "/*"));
+    String expectedClasspath = StringUtils.join(File.pathSeparator,
+        Arrays.asList(ApplicationConstants.Environment.PWD.$(),
+            FRAMEWORK_CLASSPATH, stdClasspath));
+    assertEquals("Incorrect classpath with framework and no user precedence",
+        expectedClasspath, env.get("CLASSPATH"));
+
+    env.clear();
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
+    MRApps.setClasspath(env, conf);
+    expectedClasspath = StringUtils.join(File.pathSeparator,
+        Arrays.asList(ApplicationConstants.Environment.PWD.$(),
+            stdClasspath, FRAMEWORK_CLASSPATH));
+    assertEquals("Incorrect classpath with framework and user precedence",
+        expectedClasspath, env.get("CLASSPATH"));
+  }
+
   @Test (timeout = 30000)
   public void testSetupDistributedCacheEmpty() throws IOException {
     Configuration conf = new Configuration();

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Wed Oct 16 21:07:28 2013
@@ -173,7 +173,12 @@ public class JobClient extends CLI {
      * job completes.)
      */
     public NetworkedJob(JobStatus status, Cluster cluster) throws IOException {
-      job = Job.getInstance(cluster, status, new JobConf(status.getJobFile()));
+      this(status, cluster, new JobConf(status.getJobFile()));
+    }
+    
+    private NetworkedJob(JobStatus status, Cluster cluster, JobConf conf)
+        throws IOException {
+      this(Job.getInstance(cluster, status, conf));
     }
 
     public NetworkedJob(Job job) throws IOException {
@@ -592,7 +597,8 @@ public class JobClient extends CLI {
       if (job != null) {
         JobStatus status = JobStatus.downgrade(job.getStatus());
         if (status != null) {
-          return new NetworkedJob(status, cluster);
+          return new NetworkedJob(status, cluster,
+              new JobConf(job.getConfiguration()));
         } 
       }
     } catch (InterruptedException ie) {

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java Wed Oct 16 21:07:28 2013
@@ -47,6 +47,13 @@ public class SequenceFileAsBinaryOutputF
    */
   static protected class WritableValueBytes extends org.apache.hadoop.mapreduce
       .lib.output.SequenceFileAsBinaryOutputFormat.WritableValueBytes {
+    public WritableValueBytes() {
+      super();
+    }
+
+    public WritableValueBytes(BytesWritable value) {
+      super(value);
+    }
   }
 
   /**

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java Wed Oct 16 21:07:28 2013
@@ -24,13 +24,19 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.io.compress.SplittableCompressionCodec;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
 /**
@@ -127,4 +133,34 @@ public abstract class CombineFileInputFo
     return result.toArray(new FileStatus[result.size()]);
   }
 
+  /**
+   * Subclasses should avoid overriding this method and should instead only
+   * override {@link #isSplitable(FileSystem, Path)}.  The implementation of
+   * this method simply calls the other method to preserve compatibility.
+   * @see <a href="https://issues.apache.org/jira/browse/MAPREDUCE-5530">
+   * MAPREDUCE-5530</a>
+   *
+   * @param context the job context
+   * @param file the file name to check
+   * @return is this file splitable?
+   */
+  @InterfaceAudience.Private
+  @Override
+  protected boolean isSplitable(JobContext context, Path file) {
+    try {
+      return isSplitable(FileSystem.get(context.getConfiguration()), file);
+    }
+    catch (IOException ioe) {
+      throw new RuntimeException(ioe);
+    }
+  }
+
+  protected boolean isSplitable(FileSystem fs, Path file) {
+    final CompressionCodec codec =
+      new CompressionCodecFactory(fs.getConf()).getCodec(file);
+    if (null == codec) {
+      return true;
+    }
+    return codec instanceof SplittableCompressionCodec;
+  }
 }

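[Editor's sketch] The new isSplitable(FileSystem, Path) reduces to one rule: uncompressed files are splittable; compressed files are splittable only when their codec implements SplittableCompressionCodec (bzip2 does, gzip does not). A standalone sketch of that rule with hypothetical file names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;

public class SplitableSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    for (String f : new String[] {"part-0000", "part-0000.gz", "part-0000.bz2"}) {
      CompressionCodec codec = factory.getCodec(new Path(f)); // null if uncompressed
      boolean splitable =
          codec == null || codec instanceof SplittableCompressionCodec;
      System.out.println(f + " -> " + splitable); // true, false, true
    }
  }
}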
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobCounter.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobCounter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobCounter.java Wed Oct 16 21:07:28 2013
@@ -34,9 +34,13 @@ public enum JobCounter {
   OTHER_LOCAL_MAPS,
   DATA_LOCAL_MAPS,
   RACK_LOCAL_MAPS,
+  @Deprecated
   SLOTS_MILLIS_MAPS,
+  @Deprecated
   SLOTS_MILLIS_REDUCES,
+  @Deprecated
   FALLOW_SLOTS_MILLIS_MAPS,
+  @Deprecated
   FALLOW_SLOTS_MILLIS_REDUCES,
   TOTAL_LAUNCHED_UBERTASKS,
   NUM_UBER_SUBMAPS,

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Wed Oct 16 21:07:28 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -340,11 +341,12 @@ class JobSubmitter {
 
     //validate the jobs output specs 
     checkSpecs(job);
-    
-    Path jobStagingArea = JobSubmissionFiles.getStagingDir(cluster, 
-                                                     job.getConfiguration());
-    //configure the command line options correctly on the submitting dfs
+
     Configuration conf = job.getConfiguration();
+    addMRFrameworkToDistributedCache(conf);
+
+    Path jobStagingArea = JobSubmissionFiles.getStagingDir(cluster, conf);
+    //configure the command line options correctly on the submitting dfs
     InetAddress ip = InetAddress.getLocalHost();
     if (ip != null) {
       submitHostAddress = ip.getHostAddress();
@@ -602,7 +604,6 @@ class JobSubmitter {
   }
 
   //get secret keys and tokens and store them into TokenCache
-  @SuppressWarnings("unchecked")
   private void populateTokenCache(Configuration conf, Credentials credentials) 
   throws IOException{
     readTokensFromFiles(conf, credentials);
@@ -618,4 +619,41 @@ class JobSubmitter {
       TokenCache.obtainTokensForNamenodes(credentials, ps, conf);
     }
   }
+
+  @SuppressWarnings("deprecation")
+  private static void addMRFrameworkToDistributedCache(Configuration conf)
+      throws IOException {
+    String framework =
+        conf.get(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, "");
+    if (!framework.isEmpty()) {
+      URI uri;
+      try {
+        uri = new URI(framework);
+      } catch (URISyntaxException e) {
+        throw new IllegalArgumentException("Unable to parse '" + framework
+            + "' as a URI, check the setting for "
+            + MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, e);
+      }
+
+      String linkedName = uri.getFragment();
+
+      // resolve any symlinks in the URI path so using a "current" symlink
+      // to point to a specific version shows the specific version
+      // in the distributed cache configuration
+      FileSystem fs = FileSystem.get(conf);
+      Path frameworkPath = fs.makeQualified(
+          new Path(uri.getScheme(), uri.getAuthority(), uri.getPath()));
+      FileContext fc = FileContext.getFileContext(frameworkPath.toUri(), conf);
+      frameworkPath = fc.resolvePath(frameworkPath);
+      uri = frameworkPath.toUri();
+      try {
+        uri = new URI(uri.getScheme(), uri.getAuthority(), uri.getPath(),
+            null, linkedName);
+      } catch (URISyntaxException e) {
+        throw new IllegalArgumentException(e);
+      }
+
+      DistributedCache.addCacheArchive(uri, conf);
+    }
+  }
 }

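[Editor's sketch] A hedged configuration sketch for the new submission step: when mapreduce.application.framework.path is set, the archive is shipped through the distributed cache, and the application classpath must reference the fragment name. The path and the "mrframework" link name below are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class FrameworkConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH,
        "hdfs:///apps/mapreduce/mr-framework.tar.gz#mrframework");
    // Must mention the fragment name, or setMRFrameworkClasspath() (see the
    // MRApps change above) rejects the job with IllegalArgumentException.
    conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, "mrframework/*");
  }
}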
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java Wed Oct 16 21:07:28 2013
@@ -84,11 +84,6 @@ public interface MRConfig {
     "mapreduce.shuffle.ssl.enabled";
 
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
-  
-  public static final String SSL_ENABLED_KEY =
-      "mapreduce.ssl.enabled";
-  
-  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
 
   public static final String SHUFFLE_CONSUMER_PLUGIN =
     "mapreduce.job.reduce.shuffle.consumer.plugin.class";

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Wed Oct 16 21:07:28 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.mapreduce;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.Shell;
 
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
@@ -132,6 +133,13 @@ public interface MRJobConfig {
 
   public static final String MAPREDUCE_JOB_CLASSLOADER = "mapreduce.job.classloader";
 
+  /**
+   * A comma-separated list of services that function as ShuffleProvider aux-services
+   * (in addition to the built-in ShuffleHandler).
+   * These services can serve shuffle requests from reducetasks.
+   */
+  public static final String MAPREDUCE_JOB_SHUFFLE_PROVIDER_SERVICES = "mapreduce.job.shuffle.provider.services";
+
   public static final String MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES = "mapreduce.job.classloader.system.classes";
 
   public static final String MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG = "mapreduce.jvm.system-properties-to-log";
@@ -650,12 +658,20 @@ public interface MRJobConfig {
       "mapreduce.application.classpath";
 
   /**
+   * Path to MapReduce framework archive
+   */
+  public static final String MAPREDUCE_APPLICATION_FRAMEWORK_PATH =
+      "mapreduce.application.framework.path";
+
+  /**
    * Default CLASSPATH for all YARN MapReduce applications.
    */
-  public static final String[] DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH = {
-      "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*",
-      "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*",
-  };
+  public static final String DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH =
+      Shell.WINDOWS ?
+          "%HADOOP_MAPRED_HOME%\\share\\hadoop\\mapreduce\\*,"
+          + "%HADOOP_MAPRED_HOME%\\share\\hadoop\\mapreduce\\lib\\*" :
+          "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,"
+          + "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*";
 
   public static final String WORKFLOW_ID = "mapreduce.workflow.id";
   

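The new MAPREDUCE_JOB_SHUFFLE_PROVIDER_SERVICES key lets a job name additional ShuffleProvider aux-services. A small sketch of setting it from client code; the "my_custom_shuffle" service id is a placeholder and would have to match an aux-service configured on the NodeManagers:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class ShuffleProviderSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Comma-separated list; the built-in ShuffleHandler remains available.
    conf.set(MRJobConfig.MAPREDUCE_JOB_SHUFFLE_PROVIDER_SERVICES,
        "my_custom_shuffle");
    System.out.println(
        conf.get(MRJobConfig.MAPREDUCE_JOB_SHUFFLE_PROVIDER_SERVICES));
  }
}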
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java Wed Oct 16 21:07:28 2013
@@ -90,7 +90,7 @@ public class FloatSplitter implements DB
     // Catch any overage and create the closed interval for the last split.
     if (curLower <= maxVal || splits.size() == 1) {
       splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
-          lowClausePrefix + Double.toString(curUpper),
+          lowClausePrefix + Double.toString(curLower),
           colName + " <= " + Double.toString(maxVal)));
     }
 

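The FloatSplitter fix above matters for the final, closed interval: the running upper bound has already passed maxVal when the loop exits, so the tail split must start at the current lower bound. A standalone illustration of the interval logic with made-up values:

public class FloatSplitIllustration {
  public static void main(String[] args) {
    double minVal = 0.0, maxVal = 10.0;
    int numSplits = 3;
    double splitSize = (maxVal - minVal) / numSplits;
    double curLower = minVal;
    double curUpper = curLower + splitSize;
    // Half-open intervals for all but the last split.
    while (curUpper < maxVal) {
      System.out.println("col >= " + curLower + " AND col < " + curUpper);
      curLower = curUpper;
      curUpper += splitSize;
    }
    // Closed interval for the tail: [curLower, maxVal], as in the patch.
    System.out.println("col >= " + curLower + " AND col <= " + maxVal);
  }
}

With the pre-fix code the tail split would have used curUpper as its lower bound, producing "col >= 10.0 AND col <= 10.0" and silently dropping the last range.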
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Wed Oct 16 21:07:28 2013
@@ -54,7 +54,7 @@ import org.apache.hadoop.security.Access
 import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.yarn.logaggregation.LogDumper;
+import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;
 
 import com.google.common.base.Charsets;
 
@@ -359,7 +359,7 @@ public class CLI extends Configured impl
         JobID jobID = JobID.forName(jobid);
         TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskid);
         LogParams logParams = cluster.getLogParams(jobID, taskAttemptID);
-        LogDumper logDumper = new LogDumper();
+        LogCLIHelpers logDumper = new LogCLIHelpers();
         logDumper.setConf(getConf());
         exitCode = logDumper.dumpAContainersLogs(logParams.getApplicationId(),
             logParams.getContainerId(), logParams.getNodeId(),

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Wed Oct 16 21:07:28 2013
@@ -290,20 +290,6 @@
 </property>
 
 <property>
-  <name>mapreduce.ssl.enabled</name>
-  <value>false</value>
-  <description>
-	If enabled, MapReduce application master's http server will be
-	started with SSL enabled. Map reduce AM by default doesn't support SSL.
-	If MapReduce jobs want SSL support, it is the user's responsibility to
-	create and manage certificates, keystores and trust-stores with appropriate
-	permissions. This is only for MapReduce application master and is not used
-	by job history server. To enable encrypted shuffle this property is not
-	required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
-  </description>
-</property>
-
-<property>
   <name>mapreduce.shuffle.ssl.file.buffer.size</name>
   <value>65536</value>
   <description>Buffer size for reading spills from file when using SSL.
@@ -1024,9 +1010,38 @@
 
 <property>
   <description>CLASSPATH for MR applications. A comma-separated list
-  of CLASSPATH entries</description>
+  of CLASSPATH entries. If mapreduce.application.framework.path is set then
+  this must specify the appropriate classpath for that archive, and the name
+  of the archive must be present in the classpath.
+  When this value is empty, the following default CLASSPATH for MR
+  applications is used.
+  For Linux:
+  $HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,
+  $HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*.
+  For Windows:
+  %HADOOP_MAPRED_HOME%/share/hadoop/mapreduce/*,
+  %HADOOP_MAPRED_HOME%/share/hadoop/mapreduce/lib/*.
+  </description>
    <name>mapreduce.application.classpath</name>
-   <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
+   <value></value>
+</property>
+
+<property>
+  <description>Path to the MapReduce framework archive. If set, the framework
+    archive will automatically be distributed along with the job, and this
+    path would normally reside in a public location in an HDFS filesystem. As
+    with distributed cache files, this can be a URL with a fragment specifying
+    the alias to use for the archive name. For example,
+    hdfs:/mapred/framework/hadoop-mapreduce-2.1.1.tar.gz#mrframework would
+    alias the localized archive as "mrframework".
+
+    Note that mapreduce.application.classpath must include the appropriate
+    classpath for the specified framework. The base name of the archive, or
+    alias of the archive if an alias is used, must appear in the specified
+    classpath.
+  </description>
+   <name>mapreduce.application.framework.path</name>
+   <value></value>
 </property>
 
 <property>
@@ -1205,4 +1220,14 @@
   storage class.</description>
 </property>
 
+<property>
+  <name>mapreduce.jobhistory.http.policy</name>
+  <value>HTTP_ONLY</value>
+  <description>
+    This configures the HTTP endpoint for the JobHistoryServer web UI.
+    The following values are supported:
+    - HTTP_ONLY : service is provided only over HTTP
+    - HTTPS_ONLY : service is provided only over HTTPS
+  </description>
+</property>
 </configuration>

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1527684-1532876

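The two properties above are meant to be set together: the framework path distributes the archive, and the classpath must reference the archive's base name or alias. A hedged sketch of wiring both from client code, reusing the example URI from the description; the exact classpath entries are illustrative:

import org.apache.hadoop.conf.Configuration;

public class FrameworkConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Fragment "#mrframework" aliases the localized archive directory.
    conf.set("mapreduce.application.framework.path",
        "hdfs:/mapred/framework/hadoop-mapreduce-2.1.1.tar.gz#mrframework");
    // The alias must appear in the classpath; entries here are placeholders.
    conf.set("mapreduce.application.classpath",
        "mrframework/*,mrframework/lib/*");
    System.out.println(conf.get("mapreduce.application.framework.path"));
  }
}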
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/MapReduce_Compatibility_Hadoop1_Hadoop2.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/MapReduce_Compatibility_Hadoop1_Hadoop2.apt.vm?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/MapReduce_Compatibility_Hadoop1_Hadoop2.apt.vm (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/MapReduce_Compatibility_Hadoop1_Hadoop2.apt.vm Wed Oct 16 21:07:28 2013
@@ -96,12 +96,21 @@ Apache Hadoop MapReduce - Migrating from
 
 * {Malicious}
 
- For the users who are going to try <<<hadoop-examples-1.x.x.jar>>> on YARN,
- please note that <<<hadoop -jar hadoop-examples-1.x.x.jar>>> will still use
- <<<hadoop-mapreduce-examples-2.x.x.jar>>>, which is installed together with
- other MRv2 jars. By default Hadoop framework jars appear before the users'
- jars in the classpath, such that the classes from the 2.x.x jar will still be
- picked. Users should either remove <<<hadoop-mapreduce-examples-2.x.x.jar>>>
- from the classpath or set <<<HADOOP_USER_CLASSPATH_FIRST=true>>> and
- <<<HADOOP_CLASSPATH=...:hadoop-examples-1.x.x.jar>>> to run their target
- examples jar.
+  For users who are going to try <<<hadoop-examples-1.x.x.jar>>> on YARN,
+  please note that <<<hadoop jar hadoop-examples-1.x.x.jar>>> will still use
+  <<<hadoop-mapreduce-examples-2.x.x.jar>>>, which is installed together with
+  other MRv2 jars. By default Hadoop framework jars appear before the users'
+  jars in the classpath, so the classes from the 2.x.x jar will still be
+  picked up. Users should remove <<<hadoop-mapreduce-examples-2.x.x.jar>>>
+  from the classpath of all the nodes in a cluster. Otherwise, users need to
+  set <<<HADOOP_USER_CLASSPATH_FIRST=true>>> and
+  <<<HADOOP_CLASSPATH=...:hadoop-examples-1.x.x.jar>>> to run their target
+  examples jar, and add the following configuration in <<<mapred-site.xml>>> to
+  make the processes in YARN containers pick up this jar as well.
+
++---+
+    <property>
+        <name>mapreduce.job.user.classpath.first</name>
+        <value>true</value>
+    </property>
++---+

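The same setting can also be applied per job from client code rather than cluster-wide; a minimal sketch using the property name shown in the mapred-site.xml snippet above:

import org.apache.hadoop.conf.Configuration;

public class UserClasspathFirstSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Puts user jars ahead of framework jars in task classpaths.
    conf.setBoolean("mapreduce.job.user.classpath.first", true);
    System.out.println(
        conf.getBoolean("mapreduce.job.user.classpath.first", false));
  }
}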
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java Wed Oct 16 21:07:28 2013
@@ -24,7 +24,7 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.util.TrackingUriPlugin;
 
@@ -54,8 +54,7 @@ public class MapReduceTrackingUriPlugin 
   public URI getTrackingUri(ApplicationId id) throws URISyntaxException {
     String jobSuffix = id.toString().replaceFirst("^application_", "job_");
     String historyServerAddress =
-        this.getConf().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
-    return new URI("http://" + historyServerAddress + "/jobhistory/job/"
-        + jobSuffix);
+        MRWebAppUtil.getJHSWebappURLWithScheme(getConf());
+    return new URI(historyServerAddress + "/jobhistory/job/" + jobSuffix);
   }
 }

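For illustration, the rewrite performed by MapReduceTrackingUriPlugin.getTrackingUri above boils down to a prefix substitution on the application id plus the history server's schemed address. A tiny standalone sketch with a made-up id and address:

import java.net.URI;

public class TrackingUriIllustration {
  public static void main(String[] args) throws Exception {
    String appId = "application_1381957623437_0001";           // placeholder
    String jobSuffix = appId.replaceFirst("^application_", "job_");
    String historyServerAddress = "http://jhs.example.com:19888"; // placeholder
    URI uri = new URI(historyServerAddress + "/jobhistory/job/" + jobSuffix);
    // Prints http://jhs.example.com:19888/jobhistory/job/job_1381957623437_0001
    System.out.println(uri);
  }
}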
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Wed Oct 16 21:07:28 2013
@@ -58,6 +58,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
@@ -142,7 +143,9 @@ public class CompletedJob implements org
     report.setJobFile(getConfFile().toString());
     String historyUrl = "N/A";
     try {
-      historyUrl = JobHistoryUtils.getHistoryUrl(conf, jobId.getAppId());
+      historyUrl =
+          MRWebAppUtil.getApplicationWebURLOnJHSWithoutScheme(conf,
+              jobId.getAppId());
     } catch (UnknownHostException e) {
       //Ignore.
     }

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Wed Oct 16 21:07:28 2013
@@ -75,12 +75,12 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.HsWebApp;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -144,10 +144,7 @@ public class HistoryClientService extend
 
   private void initializeWebApp(Configuration conf) {
     webApp = new HsWebApp(history);
-    InetSocketAddress bindAddress = conf.getSocketAddr(
-        JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
+    InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
     // NOTE: there should be a .at(InetSocketAddress)
     WebApps
         .$for("jobhistory", HistoryClientService.class, this, "ws")
@@ -157,8 +154,9 @@ public class HistoryClientService extend
         .withHttpSpnegoPrincipalKey(
             JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
         .at(NetUtils.getHostPortString(bindAddress)).start(webApp);
-    conf.updateConnectAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-                           webApp.getListenerAddress());
+    
+    MRWebAppUtil.setJHSWebappURLWithoutScheme(conf,
+        NetUtils.getHostPortString(webApp.getListenerAddress()));
   }
 
   @Override

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java Wed Oct 16 21:07:28 2013
@@ -389,7 +389,7 @@ public class JobHistory extends Abstract
   }
 
   @Override
-  public boolean safeToReportTerminationToUser() {
+  public boolean hasSuccessfullyUnregistered() {
     // bogus - Not Required
     return true;
   }

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java Wed Oct 16 21:07:28 2013
@@ -24,13 +24,13 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService.HistoryServerState;
 import org.apache.hadoop.mapreduce.v2.hs.server.HSAdminServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;
@@ -118,9 +118,8 @@ public class JobHistoryServer extends Co
     config.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true);
 
     // This is required for WebApps to use https if enabled.
-    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+    MRWebAppUtil.initialize(getConfig());
+    HttpConfig.setPolicy(MRWebAppUtil.getJHSHttpPolicy());
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {

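The JobHistoryServer now derives its HTTP policy via MRWebAppUtil instead of the generic hadoop.ssl.enabled flag. A sketch of how the new mapreduce.jobhistory.http.policy knob could be consulted before the web app starts; only the property name and its two values come from this patch, the surrounding scaffolding is illustrative:

import org.apache.hadoop.conf.Configuration;

public class JHSHttpPolicySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // HTTP_ONLY is the default shipped in mapred-default.xml above.
    String policy = conf.get("mapreduce.jobhistory.http.policy", "HTTP_ONLY");
    String scheme = "HTTPS_ONLY".equals(policy) ? "https://" : "http://";
    System.out.println("JHS web UI scheme: " + scheme);
  }
}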
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java Wed Oct 16 21:07:28 2013
@@ -18,21 +18,25 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
+
 import java.util.Date;
 import java.util.List;
 
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.ResponseInfo;
@@ -41,8 +45,8 @@ import org.apache.hadoop.yarn.webapp.ham
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+
+import com.google.inject.Inject;
 
 /**
  * Render a block of HTML for a given job.
@@ -133,7 +137,7 @@ public class HsJobBlock extends HtmlBloc
             table.tr((odd = !odd) ? _ODD : _EVEN).
               td(String.valueOf(attempt.getAttemptId())).
               td(new Date(attempt.getStartTime()).toString()).
-              td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
+              td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(),
                   attempt.getNodeHttpAddress()),
                   attempt.getNodeHttpAddress())._().
               td().a(".logslink", url(attempt.getShortLogsLink()), 

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java Wed Oct 16 21:07:28 2013
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_ID;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
@@ -30,20 +30,17 @@ import static org.apache.hadoop.yarn.web
 import java.util.Collection;
 
 import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TFOOT;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.THEAD;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
@@ -149,7 +146,7 @@ public class HsTaskPage extends HsView {
         .append(sortId + " ").append(taid).append("\",\"")
         .append(ta.getState().toString()).append("\",\"")
 
-        .append("<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>")
+        .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
         .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
 
         .append("<a class='logslink' href='").append(url("logs", nodeIdString

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml Wed Oct 16 21:07:28 2013
@@ -124,6 +124,11 @@
             <goals>
               <goal>test-jar</goal>
             </goals>
+            <configuration>
+              <excludes>
+                <exclude>**/hdfs-site.xml</exclude>
+              </excludes>
+            </configuration>
             <phase>test-compile</phase>
           </execution>
         </executions>

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Wed Oct 16 21:07:28 2013
@@ -33,7 +33,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -424,9 +423,6 @@ public class ClientServiceDelegate {
       String historyTrackingUrl = report.getTrackingUrl();
       String url = StringUtils.isNotEmpty(historyTrackingUrl)
           ? historyTrackingUrl : trackingUrl;
-      if (!UNAVAILABLE.equals(url)) {
-        url = HttpConfig.getSchemePrefix() + url;
-      }
       jobStatus = TypeConverter.fromYarn(report, url);
     }
     return jobStatus;

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java Wed Oct 16 21:07:28 2013
@@ -514,7 +514,7 @@ public class TestClientServiceDelegate {
     jobReport.setMapProgress(1.0f);
     jobReport.setReduceProgress(1.0f);
     jobReport.setJobFile("TestJobFilePath");
-    jobReport.setTrackingUrl("TestTrackingUrl");
+    jobReport.setTrackingUrl("http://TestTrackingUrl");
     jobReportResponse.setJobReport(jobReport);
     return jobReportResponse;
   }


