hadoop-mapreduce-commits mailing list archives

From a..@apache.org
Subject svn commit: r1230248 [3/5] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/mai...
Date Wed, 11 Jan 2012 20:53:55 GMT
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java Wed Jan 11 20:53:50 2012
@@ -29,6 +29,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -51,6 +53,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
@@ -71,6 +75,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto;
@@ -214,7 +219,26 @@ public class MRClientProtocolPBClientImp
       }
     }
   }
-
+  
+  @Override
+  public GetDelegationTokenResponse getDelegationToken(
+      GetDelegationTokenRequest request) throws YarnRemoteException {
+    GetDelegationTokenRequestProto requestProto = ((GetDelegationTokenRequestPBImpl)
+        request).getProto();
+    try {
+      return new GetDelegationTokenResponsePBImpl(proxy.getDelegationToken(
+          null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+  
   @Override
   public KillJobResponse killJob(KillJobRequest request)
       throws YarnRemoteException {

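The client-side addition follows the translator pattern used by every other method in this class: unwrap the typed GetDelegationTokenRequest record to its protobuf form, invoke the generated blocking stub, and wrap the returned proto back into a record, unwrapping ServiceException into YarnRemoteException where possible. A minimal caller sketch, assuming the record factory pattern used elsewhere in YARN and a setRenewer accessor mirroring the proto's optional renewer field:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class DelegationTokenFetch {
  // Fetch an MR history server delegation token over the new RPC.
  public static GetDelegationTokenResponse fetch(MRClientProtocol client,
      Configuration conf) throws Exception {
    RecordFactory factory = RecordFactoryProvider.getRecordFactory(conf);
    GetDelegationTokenRequest request =
        factory.newRecordInstance(GetDelegationTokenRequest.class);
    request.setRenewer("rm/renewer@EXAMPLE.COM"); // assumed accessor; made-up principal
    return client.getDelegationToken(request);
  }
}
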
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java Wed Jan 11 20:53:50 2012
@@ -23,6 +23,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
@@ -66,6 +70,8 @@ import org.apache.hadoop.mapreduce.v2.pr
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
@@ -184,7 +190,20 @@ public class MRClientProtocolPBServiceIm
       throw new ServiceException(e);
     }
   }
-
+  
+  @Override
+  public GetDelegationTokenResponseProto getDelegationToken(
+      RpcController controller, GetDelegationTokenRequestProto proto)
+      throws ServiceException {
+    GetDelegationTokenRequest request = new GetDelegationTokenRequestPBImpl(proto);
+    try {
+      GetDelegationTokenResponse response = real.getDelegationToken(request);
+      return ((GetDelegationTokenResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+  
   @Override
   public KillJobResponseProto killJob(RpcController controller,
       KillJobRequestProto proto) throws ServiceException {

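The service-side stub performs the inverse translation: proto in, typed record out to the real MRClientProtocol implementation, record back to proto, with YarnRemoteException tunneled through ServiceException. A small round-trip sketch of the new request record, assuming the PBImpl constructor conventions shared by the other protocol records and a renewer accessor on the record:

import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;

public class PBRoundTrip {
  public static void main(String[] args) {
    // Record -> proto: what the client impl does before the bytes hit the wire.
    GetDelegationTokenRequestPBImpl request = new GetDelegationTokenRequestPBImpl();
    request.setRenewer("renewer");             // assumed accessor, mirrors the proto field
    GetDelegationTokenRequestProto proto = request.getProto();

    // Proto -> record: what the service impl does on receipt.
    GetDelegationTokenRequest received = new GetDelegationTokenRequestPBImpl(proto);
    System.out.println(received.getRenewer()); // prints "renewer"
  }
}
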
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java Wed Jan 11 20:53:50 2012
@@ -24,7 +24,9 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.yarn.proto.MRClientProtocol;
 
 public class ClientHSSecurityInfo extends SecurityInfo {
@@ -56,7 +58,22 @@ public class ClientHSSecurityInfo extend
 
   @Override
   public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
-    return null;
-  }
+    if (!protocol
+        .equals(MRClientProtocol.MRClientProtocolService.BlockingInterface.class)) {
+      return null;
+    }
+    return new TokenInfo() {
+
+      @Override
+      public Class<? extends Annotation> annotationType() {
+        return null;
+      }
+
+      @Override
+      public Class<? extends TokenSelector<? extends TokenIdentifier>>
+          value() {
+        return ClientHSTokenSelector.class;
+      }
+    };
+  }
 
 }

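TokenInfo is an annotation type, so the method returns an anonymous runtime implementation of it whose value() points the RPC layer at ClientHSTokenSelector, but only when the protocol is the history server's blocking interface. A sketch of how a consumer might resolve the selector; the lookup below is illustrative, not Hadoop's actual SASL client code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.security.client.ClientHSSecurityInfo;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenInfo;
import org.apache.hadoop.security.token.TokenSelector;
import org.apache.hadoop.yarn.proto.MRClientProtocol;

public class SelectorLookup {
  public static void main(String[] args) throws Exception {
    TokenInfo tokenInfo = new ClientHSSecurityInfo().getTokenInfo(
        MRClientProtocol.MRClientProtocolService.BlockingInterface.class,
        new Configuration());
    // value() names the selector class; the RPC layer instantiates it to pick
    // the matching token out of the caller's credentials for this protocol.
    TokenSelector<? extends TokenIdentifier> selector =
        tokenInfo.value().newInstance();
    System.out.println(selector.getClass().getName()); // ClientHSTokenSelector
  }
}
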
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Wed Jan 11 20:53:50 2012
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.URI;
+import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -181,23 +182,31 @@ public class MRApps extends Apps {
       String mrAppGeneratedClasspathFile = "mrapp-generated-classpath";
       classpathFileStream =
           thisClassLoader.getResourceAsStream(mrAppGeneratedClasspathFile);
+
       // Put the file itself on classpath for tasks.
-      String classpathElement = thisClassLoader.getResource(mrAppGeneratedClasspathFile).getFile();
-      if (classpathElement.contains("!")) {
-        classpathElement = classpathElement.substring(0, classpathElement.indexOf("!"));
+      URL classpathResource = thisClassLoader
+        .getResource(mrAppGeneratedClasspathFile);
+      if (classpathResource != null) {
+        String classpathElement = classpathResource.getFile();
+        if (classpathElement.contains("!")) {
+          classpathElement = classpathElement.substring(0,
+            classpathElement.indexOf("!"));
+        } else {
+          classpathElement = new File(classpathElement).getParent();
+        }
+        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
+          classpathElement);
       }
-      else {
-        classpathElement = new File(classpathElement).getParent();
+
+      if (classpathFileStream != null) {
+        reader = new BufferedReader(new InputStreamReader(classpathFileStream));
+        String cp = reader.readLine();
+        if (cp != null) {
+          Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
+            cp.trim());
+        }
       }
-      Apps.addToEnvironment(
-          environment,
-          Environment.CLASSPATH.name(), classpathElement);
-
-      reader = new BufferedReader(new InputStreamReader(classpathFileStream));
-      String cp = reader.readLine();
-      if (cp != null) {
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), cp.trim());
-      }      
+
       // Add standard Hadoop classes
       for (String c : ApplicationConstants.APPLICATION_CLASSPATH) {
         Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c);

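The rewrite makes the classpath bootstrapping null-safe. When mrapp-generated-classpath is packaged inside a jar, getResource() returns a URL whose file part looks like /opt/app.jar!/mrapp-generated-classpath and only the jar path is kept; for a plain file, its parent directory goes on the classpath; when the resource is missing entirely, both blocks are now skipped instead of failing with a NullPointerException. A standalone sketch of that path extraction, with made-up paths:

import java.io.File;

public class ClasspathElement {
  // Mirrors the extraction above: keep the jar path, or fall back to the
  // containing directory for a loose file.
  static String toClasspathElement(String resourceFile) {
    if (resourceFile.contains("!")) {
      return resourceFile.substring(0, resourceFile.indexOf("!"));
    }
    return new File(resourceFile).getParent();
  }

  public static void main(String[] args) {
    // -> /opt/app.jar
    System.out.println(toClasspathElement("/opt/app.jar!/mrapp-generated-classpath"));
    // -> /opt/conf
    System.out.println(toClasspathElement("/opt/conf/mrapp-generated-classpath"));
  }
}
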
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto Wed Jan 11 20:53:50 2012
@@ -30,7 +30,7 @@ service MRClientProtocolService {
   rpc getTaskAttemptCompletionEvents (GetTaskAttemptCompletionEventsRequestProto) returns (GetTaskAttemptCompletionEventsResponseProto);
   rpc getTaskReports (GetTaskReportsRequestProto) returns (GetTaskReportsResponseProto);
   rpc getDiagnostics (GetDiagnosticsRequestProto) returns (GetDiagnosticsResponseProto);
-
+  rpc getDelegationToken (GetDelegationTokenRequestProto) returns (GetDelegationTokenResponseProto);
   rpc killJob (KillJobRequestProto) returns (KillJobResponseProto);
   rpc killTask (KillTaskRequestProto) returns (KillTaskResponseProto);
   rpc killTaskAttempt (KillTaskAttemptRequestProto) returns (KillTaskAttemptResponseProto);

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto Wed Jan 11 20:53:50 2012
@@ -22,6 +22,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 
 import "mr_protos.proto";
+import "yarn_protos.proto";
 
 message GetJobReportRequestProto {
   optional JobIdProto job_id = 1;
@@ -75,6 +76,13 @@ message GetDiagnosticsResponseProto {
   repeated string diagnostics = 1;
 }
 
+message GetDelegationTokenRequestProto {
+  optional string renewer = 1;
+}
+
+message GetDelegationTokenResponseProto {
+  optional DelegationTokenProto m_r_delegation_token = 1;
+}
 
 message KillJobRequestProto {
   optional JobIdProto job_id = 1;

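Together with the rpc declaration added to MRClientProtocol.proto above, these messages fix the wire format: an optional renewer string in the request and an optional DelegationTokenProto (hence the new yarn_protos.proto import) in the response. Since java_generic_services is enabled, protoc emits the usual builder API; a sketch of building the request message, with a made-up renewer principal:

import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;

public class ProtoBuild {
  public static void main(String[] args) {
    GetDelegationTokenRequestProto proto = GetDelegationTokenRequestProto
        .newBuilder()
        .setRenewer("rm/renewer@EXAMPLE.COM") // made-up principal
        .build();
    System.out.println(proto.hasRenewer());   // true; the field is optional
  }
}
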
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java Wed Jan 11 20:53:50 2012
@@ -30,6 +30,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -123,28 +125,24 @@ public class TestRPCFactories {
     @Override
     public GetJobReportResponse getJobReport(GetJobReportRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskAttemptReportResponse getTaskAttemptReport(
         GetTaskAttemptReportRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetCountersResponse getCounters(GetCountersRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
@@ -152,51 +150,49 @@ public class TestRPCFactories {
     public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
         GetTaskAttemptCompletionEventsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillJobResponse killJob(KillJobRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillTaskResponse killTask(KillTaskRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillTaskAttemptResponse killTaskAttempt(
         KillTaskAttemptRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public FailTaskAttemptResponse failTaskAttempt(
         FailTaskAttemptRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
-    
+
+    @Override
+    public GetDelegationTokenResponse getDelegationToken(
+        GetDelegationTokenRequest request) throws YarnRemoteException {
+      return null;
+    }
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Wed Jan 11 20:53:50 2012
@@ -140,7 +140,20 @@ import org.apache.hadoop.util.ToolRunner
 public class JobClient extends CLI {
   public static enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
   private TaskStatusFilter taskOutputFilter = TaskStatusFilter.FAILED; 
-
+  /* Notes that getDelegationToken was called. This is a hack for Oozie,
+   * to make sure we add history server delegation tokens to the credentials
+   * for the job. Since the API only allows one delegation token to be
+   * returned, we have to add this hack.
+   */
+  private boolean getDelegationTokenCalled = false;
+  /* notes the renewer that will renew the delegation token */
+  private Text dtRenewer = null;
+  /* do we need a HS delegation token for this client */
+  static final String HS_DELEGATION_TOKEN_REQUIRED 
+      = "mapreduce.history.server.delegationtoken.required";
+  static final String HS_DELEGATION_TOKEN_RENEWER 
+      = "mapreduce.history.server.delegationtoken.renewer";
+  
   static{
     ConfigUtil.loadResources();
   }
@@ -584,6 +597,12 @@ public class JobClient extends CLI {
     try {
       conf.setBooleanIfUnset("mapred.mapper.new-api", false);
       conf.setBooleanIfUnset("mapred.reducer.new-api", false);
+      if (getDelegationTokenCalled) {
+        conf.setBoolean(HS_DELEGATION_TOKEN_REQUIRED, getDelegationTokenCalled);
+        getDelegationTokenCalled = false;
+        conf.set(HS_DELEGATION_TOKEN_RENEWER, dtRenewer.toString());
+        dtRenewer = null;
+      }
       Job job = clientUgi.doAs(new PrivilegedExceptionAction<Job> () {
         @Override
         public Job run() throws IOException, ClassNotFoundException, 
@@ -1170,6 +1189,8 @@ public class JobClient extends CLI {
    */
   public Token<DelegationTokenIdentifier> 
     getDelegationToken(final Text renewer) throws IOException, InterruptedException {
+    getDelegationTokenCalled = true;
+    dtRenewer = renewer;
     return clientUgi.doAs(new 
         PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>() {
       public Token<DelegationTokenIdentifier> run() throws IOException, 

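getDelegationToken() now records that it was called and by which renewer, and submitJobInternal() copies both facts into the job configuration through HS_DELEGATION_TOKEN_REQUIRED and HS_DELEGATION_TOKEN_RENEWER, letting the submission path fetch a history server token as well, which works around the API returning only one token. A sketch of the Oozie-style sequence this supports; the class and token alias are made up:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.token.Token;

public class OozieStyleLaunch {
  public static void run(JobClient client, JobConf conf) throws Exception {
    // Step 1: fetch a delegation token up front; JobClient notes the renewer.
    Token<?> token = client.getDelegationToken(new Text("oozie"));
    conf.getCredentials().addToken(new Text("mr.token"), token);
    // Step 2: the next submission sees the flag in the conf and also obtains
    // a history server token under the covers.
    client.submitJob(conf);
  }
}
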
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Wed Jan 11 20:53:50 2012
@@ -156,6 +156,8 @@ public interface MRJobConfig {
   
   public static final String TASK_TIMEOUT = "mapreduce.task.timeout";
 
+  public static final String TASK_TIMEOUT_CHECK_INTERVAL_MS = "mapreduce.task.timeout.check-interval-ms";
+  
   public static final String TASK_ID = "mapreduce.task.id";
 
   public static final String TASK_OUTPUT_DIR = "mapreduce.task.output.dir";
@@ -277,6 +279,12 @@ public interface MRJobConfig {
   public static final String JOB_ACL_MODIFY_JOB = "mapreduce.job.acl-modify-job";
 
   public static final String DEFAULT_JOB_ACL_MODIFY_JOB = " ";
+  
+  /* Config for tracking the local file where all the credentials for the
+   * job are stored.
+   */
+  public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY = 
+      "mapreduce.job.credentials.binary";
 
   public static final String JOB_SUBMITHOST =
     "mapreduce.job.submithostname";
@@ -367,6 +375,11 @@ public interface MRJobConfig {
   public static final String MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 
     MR_AM_PREFIX  + "job.reduce.preemption.limit";
   public static final float DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 0.5f;
+  
+  /** AM ACL disabled. **/
+  public static final String JOB_AM_ACCESS_DISABLED = 
+    "mapreduce.job.am-access-disabled";
+  public static final boolean DEFAULT_JOB_AM_ACCESS_DISABLED = false;
 
   /**
    * Limit reduces starting until a certain percentage of maps have finished.
@@ -499,6 +512,9 @@ public interface MRJobConfig {
   public static final String MR_JOB_END_NOTIFICATION_URL =
     "mapreduce.job.end-notification.url";
 
+  public static final String MR_JOB_END_NOTIFICATION_PROXY =
+    "mapreduce.job.end-notification.proxy";
+
   public static final String MR_JOB_END_RETRY_ATTEMPTS =
     "mapreduce.job.end-notification.retry.attempts";
 

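MRJobConfig only declares key names; consumers read them through Configuration as usual. A brief sketch of reading the new entries back; the 8000 ms fallback below is illustrative, not the shipped default:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class ReadNewKeys {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    String credsFile = conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
    boolean amAccessDisabled = conf.getBoolean(
        MRJobConfig.JOB_AM_ACCESS_DISABLED,
        MRJobConfig.DEFAULT_JOB_AM_ACCESS_DISABLED);
    long checkIntervalMs = conf.getLong(
        MRJobConfig.TASK_TIMEOUT_CHECK_INTERVAL_MS, 8000L); // illustrative default
    System.out.println(credsFile + " " + amAccessDisabled + " " + checkIntervalMs);
  }
}
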
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java Wed Jan 11 20:53:50 2012
@@ -18,15 +18,12 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobID;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record successful completion of job
  *
@@ -34,7 +31,18 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class JobFinishedEvent  implements HistoryEvent {
-  private JobFinished datum = new JobFinished();
+
+  private JobFinished datum = null;
+
+  private JobID jobId;
+  private long finishTime;
+  private int finishedMaps;
+  private int finishedReduces;
+  private int failedMaps;
+  private int failedReduces;
+  private Counters mapCounters;
+  private Counters reduceCounters;
+  private Counters totalCounters;
 
   /** 
    * Create an event to record successful job completion
@@ -53,50 +61,75 @@ public class JobFinishedEvent  implement
       int failedMaps, int failedReduces,
       Counters mapCounters, Counters reduceCounters,
       Counters totalCounters) {
-    datum.jobid = new Utf8(id.toString());
-    datum.finishTime = finishTime;
-    datum.finishedMaps = finishedMaps;
-    datum.finishedReduces = finishedReduces;
-    datum.failedMaps = failedMaps;
-    datum.failedReduces = failedReduces;
-    datum.mapCounters =
-      EventWriter.toAvro(mapCounters, "MAP_COUNTERS");
-    datum.reduceCounters =
-      EventWriter.toAvro(reduceCounters, "REDUCE_COUNTERS");
-    datum.totalCounters =
-      EventWriter.toAvro(totalCounters, "TOTAL_COUNTERS");
+    this.jobId = id;
+    this.finishTime = finishTime;
+    this.finishedMaps = finishedMaps;
+    this.finishedReduces = finishedReduces;
+    this.failedMaps = failedMaps;
+    this.failedReduces = failedReduces;
+    this.mapCounters = mapCounters;
+    this.reduceCounters = reduceCounters;
+    this.totalCounters = totalCounters;
   }
 
   JobFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) { this.datum = (JobFinished)datum; }
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new JobFinished();
+      datum.jobid = new Utf8(jobId.toString());
+      datum.finishTime = finishTime;
+      datum.finishedMaps = finishedMaps;
+      datum.finishedReduces = finishedReduces;
+      datum.failedMaps = failedMaps;
+      datum.failedReduces = failedReduces;
+      datum.mapCounters = EventWriter.toAvro(mapCounters, "MAP_COUNTERS");
+      datum.reduceCounters = EventWriter.toAvro(reduceCounters,
+        "REDUCE_COUNTERS");
+      datum.totalCounters = EventWriter.toAvro(totalCounters, "TOTAL_COUNTERS");
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (JobFinished) oDatum;
+    this.jobId = JobID.forName(datum.jobid.toString());
+    this.finishTime = datum.finishTime;
+    this.finishedMaps = datum.finishedMaps;
+    this.finishedReduces = datum.finishedReduces;
+    this.failedMaps = datum.failedMaps;
+    this.failedReduces = datum.failedReduces;
+    this.mapCounters = EventReader.fromAvro(datum.mapCounters);
+    this.reduceCounters = EventReader.fromAvro(datum.reduceCounters);
+    this.totalCounters = EventReader.fromAvro(datum.totalCounters);
+  }
+
   public EventType getEventType() {
     return EventType.JOB_FINISHED;
   }
 
   /** Get the Job ID */
-  public JobID getJobid() { return JobID.forName(datum.jobid.toString()); }
+  public JobID getJobid() { return jobId; }
   /** Get the job finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the number of finished maps for the job */
-  public int getFinishedMaps() { return datum.finishedMaps; }
+  public int getFinishedMaps() { return finishedMaps; }
   /** Get the number of finished reducers for the job */
-  public int getFinishedReduces() { return datum.finishedReduces; }
+  public int getFinishedReduces() { return finishedReduces; }
   /** Get the number of failed maps for the job */
-  public int getFailedMaps() { return datum.failedMaps; }
+  public int getFailedMaps() { return failedMaps; }
   /** Get the number of failed reducers for the job */
-  public int getFailedReduces() { return datum.failedReduces; }
+  public int getFailedReduces() { return failedReduces; }
   /** Get the counters for the job */
   public Counters getTotalCounters() {
-    return EventReader.fromAvro(datum.totalCounters);
+    return totalCounters;
   }
   /** Get the Map counters for the job */
   public Counters getMapCounters() {
-    return EventReader.fromAvro(datum.mapCounters);
+    return mapCounters;
   }
   /** Get the reduce counters for the job */
   public Counters getReduceCounters() {
-    return EventReader.fromAvro(datum.reduceCounters);
+    return reduceCounters;
   }
 }

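This refactor, repeated for the map attempt, reduce attempt, task attempt, and task finished events below, makes the Avro datum lazy: the constructor stores plain Java fields, getDatum() materializes the Avro record only when the event is actually serialized, and setDatum() hydrates the fields back when history files are read. A miniature of the pattern, reduced to one field and not the committed classes:

import org.apache.avro.util.Utf8;

class LazyEvent {
  private Utf8 datum;    // Avro-side value, built on demand
  private String jobId;  // plain field populated by the constructor

  LazyEvent(String jobId) { this.jobId = jobId; }

  Object getDatum() {
    if (datum == null) {
      datum = new Utf8(jobId);     // conversion deferred to serialization time
    }
    return datum;
  }

  void setDatum(Object oDatum) {
    this.datum = (Utf8) oDatum;
    this.jobId = datum.toString(); // rebuild plain fields when reading history
  }
}
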
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java Wed Jan 11 20:53:50 2012
@@ -34,8 +34,25 @@ import org.apache.hadoop.mapreduce.TaskT
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class MapAttemptFinishedEvent  implements HistoryEvent {
-  private MapAttemptFinished datum = new MapAttemptFinished();
-  
+
+  private MapAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long finishTime;
+  private String hostname;
+  private String rackName;
+  private int port;
+  private long mapFinishTime;
+  private String state;
+  private Counters counters;
+  int[][] allSplits;
+  int[] clockSplits;
+  int[] cpuUsages;
+  int[] vMemKbytes;
+  int[] physMemKbytes;
+
   /** 
    * Create an event for successful completion of map attempts
    * @param id Task Attempt ID
@@ -60,33 +77,21 @@ public class MapAttemptFinishedEvent  im
       (TaskAttemptID id, TaskType taskType, String taskStatus, 
        long mapFinishTime, long finishTime, String hostname, int port, 
        String rackName, String state, Counters counters, int[][] allSplits) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.mapFinishTime = mapFinishTime;
-    datum.finishTime = finishTime;
-    datum.hostname = new Utf8(hostname);
-    datum.port = port;
-    // This is needed for reading old jh files
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
-
-    datum.clockSplits
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetWallclockTime(allSplits));
-    datum.cpuUsages 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetCPUTime(allSplits));
-    datum.vMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetVMemKbytes(allSplits));
-    datum.physMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits));
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.mapFinishTime = mapFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.rackName = rackName;
+    this.port = port;
+    this.state = state;
+    this.counters = counters;
+    this.allSplits = allSplits;
+    this.clockSplits = ProgressSplitsBlock.arrayGetWallclockTime(allSplits);
+    this.cpuUsages = ProgressSplitsBlock.arrayGetCPUTime(allSplits);
+    this.vMemKbytes = ProgressSplitsBlock.arrayGetVMemKbytes(allSplits);
+    this.physMemKbytes = ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits);
   }
 
   /** 
@@ -117,57 +122,100 @@ public class MapAttemptFinishedEvent  im
   
   MapAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (MapAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new MapAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.mapFinishTime = mapFinishTime;
+      datum.finishTime = finishTime;
+      datum.hostname = new Utf8(hostname);
+      datum.port = port;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+
+      datum.clockSplits = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetWallclockTime(allSplits));
+      datum.cpuUsages = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetCPUTime(allSplits));
+      datum.vMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetVMemKbytes(allSplits));
+      datum.physMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetPhysMemKbytes(allSplits));
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (MapAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.mapFinishTime = datum.mapFinishTime;
+    this.finishTime = datum.finishTime;
+    this.hostname = datum.hostname.toString();
+    this.rackName = datum.rackname.toString();
+    this.port = datum.port;
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
+    this.clockSplits = AvroArrayUtils.fromAvro(datum.clockSplits);
+    this.cpuUsages = AvroArrayUtils.fromAvro(datum.cpuUsages);
+    this.vMemKbytes = AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    this.physMemKbytes = AvroArrayUtils.fromAvro(datum.physMemKbytes);
   }
 
   /** Get the task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return attemptId;
   }
+
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the map phase finish time */
-  public long getMapFinishTime() { return datum.mapFinishTime; }
+  public long getMapFinishTime() { return mapFinishTime; }
   /** Get the attempt finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the host name */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   /** Get the tracker rpc port */
-  public int getPort() { return datum.port; }
+  public int getPort() { return port; }
   
   /** Get the rack name */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
    public EventType getEventType() {
     return EventType.MAP_ATTEMPT_FINISHED;
   }
 
   public int[] getClockSplits() {
-    return AvroArrayUtils.fromAvro(datum.clockSplits);
+    return clockSplits;
   }
   public int[] getCpuUsages() {
-    return AvroArrayUtils.fromAvro(datum.cpuUsages);
+    return cpuUsages;
   }
   public int[] getVMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    return vMemKbytes;
   }
   public int[] getPhysMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.physMemKbytes);
+    return physMemKbytes;
   }
   
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java Wed Jan 11 20:53:50 2012
@@ -34,8 +34,25 @@ import org.apache.hadoop.mapreduce.TaskT
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class ReduceAttemptFinishedEvent  implements HistoryEvent {
-  private ReduceAttemptFinished datum =
-    new ReduceAttemptFinished();
+
+  private ReduceAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long shuffleFinishTime;
+  private long sortFinishTime;
+  private long finishTime;
+  private String hostname;
+  private String rackName;
+  private int port;
+  private String state;
+  private Counters counters;
+  int[][] allSplits;
+  int[] clockSplits;
+  int[] cpuUsages;
+  int[] vMemKbytes;
+  int[] physMemKbytes;
 
   /**
    * Create an event to record completion of a reduce attempt
@@ -60,33 +77,22 @@ public class ReduceAttemptFinishedEvent 
      long shuffleFinishTime, long sortFinishTime, long finishTime,
      String hostname, int port,  String rackName, String state, 
      Counters counters, int[][] allSplits) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.shuffleFinishTime = shuffleFinishTime;
-    datum.sortFinishTime = sortFinishTime;
-    datum.finishTime = finishTime;
-    datum.hostname = new Utf8(hostname);
-    datum.port = port;
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
-
-    datum.clockSplits 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetWallclockTime(allSplits));
-    datum.cpuUsages 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetCPUTime(allSplits));
-    datum.vMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetVMemKbytes(allSplits));
-    datum.physMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits));
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.shuffleFinishTime = shuffleFinishTime;
+    this.sortFinishTime = sortFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.rackName = rackName;
+    this.port = port;
+    this.state = state;
+    this.counters = counters;
+    this.allSplits = allSplits;
+    this.clockSplits = ProgressSplitsBlock.arrayGetWallclockTime(allSplits);
+    this.cpuUsages = ProgressSplitsBlock.arrayGetCPUTime(allSplits);
+    this.vMemKbytes = ProgressSplitsBlock.arrayGetVMemKbytes(allSplits);
+    this.physMemKbytes = ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits);
   }
 
   /**
@@ -117,43 +123,87 @@ public class ReduceAttemptFinishedEvent 
 
   ReduceAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (ReduceAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new ReduceAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.shuffleFinishTime = shuffleFinishTime;
+      datum.sortFinishTime = sortFinishTime;
+      datum.finishTime = finishTime;
+      datum.hostname = new Utf8(hostname);
+      datum.port = port;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+
+      datum.clockSplits = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetWallclockTime(allSplits));
+      datum.cpuUsages = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetCPUTime(allSplits));
+      datum.vMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetVMemKbytes(allSplits));
+      datum.physMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetPhysMemKbytes(allSplits));
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (ReduceAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.shuffleFinishTime = datum.shuffleFinishTime;
+    this.sortFinishTime = datum.sortFinishTime;
+    this.finishTime = datum.finishTime;
+    this.hostname = datum.hostname.toString();
+    this.rackName = datum.rackname.toString();
+    this.port = datum.port;
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
+    this.clockSplits = AvroArrayUtils.fromAvro(datum.clockSplits);
+    this.cpuUsages = AvroArrayUtils.fromAvro(datum.cpuUsages);
+    this.vMemKbytes = AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    this.physMemKbytes = AvroArrayUtils.fromAvro(datum.physMemKbytes);
   }
 
   /** Get the Task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return TaskAttemptID.forName(attemptId.toString());
   }
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the finish time of the sort phase */
-  public long getSortFinishTime() { return datum.sortFinishTime; }
+  public long getSortFinishTime() { return sortFinishTime; }
   /** Get the finish time of the shuffle phase */
-  public long getShuffleFinishTime() { return datum.shuffleFinishTime; }
+  public long getShuffleFinishTime() { return shuffleFinishTime; }
   /** Get the finish time of the attempt */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the name of the host where the attempt ran */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   /** Get the tracker rpc port */
-  public int getPort() { return datum.port; }
+  public int getPort() { return port; }
   
   /** Get the rack name of the node where the attempt ran */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters for the attempt */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
   public EventType getEventType() {
     return EventType.REDUCE_ATTEMPT_FINISHED;
@@ -161,16 +211,16 @@ public class ReduceAttemptFinishedEvent 
 
 
   public int[] getClockSplits() {
-    return AvroArrayUtils.fromAvro(datum.clockSplits);
+    return clockSplits;
   }
   public int[] getCpuUsages() {
-    return AvroArrayUtils.fromAvro(datum.cpuUsages);
+    return cpuUsages;
   }
   public int[] getVMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    return vMemKbytes;
   }
   public int[] getPhysMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.physMemKbytes);
+    return physMemKbytes;
   }
 
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java Wed Jan 11 20:53:50 2012
@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
@@ -27,8 +26,6 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record successful task completion
  *
@@ -36,7 +33,17 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class TaskAttemptFinishedEvent  implements HistoryEvent {
-  private TaskAttemptFinished datum = new TaskAttemptFinished();
+
+  private TaskAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long finishTime;
+  private String rackName;
+  private String hostname;
+  private String state;
+  private Counters counters;
 
   /**
    * Create an event to record successful finishes for setup and cleanup 
@@ -53,52 +60,73 @@ public class TaskAttemptFinishedEvent  i
       TaskType taskType, String taskStatus, 
       long finishTime, String rackName,
       String hostname, String state, Counters counters) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.finishTime = finishTime;
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.hostname = new Utf8(hostname);
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.finishTime = finishTime;
+    this.rackName = rackName;
+    this.hostname = hostname;
+    this.state = state;
+    this.counters = counters;
   }
 
   TaskAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (TaskAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new TaskAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.finishTime = finishTime;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.hostname = new Utf8(hostname);
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+    }
+    return datum;
+  }
+  public void setDatum(Object oDatum) {
+    this.datum = (TaskAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.finishTime = datum.finishTime;
+    this.rackName = datum.rackname.toString();
+    this.hostname = datum.hostname.toString();
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
   }
 
   /** Get the task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the task attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return TaskAttemptID.forName(attemptId.toString());
   }
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the attempt finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the host where the attempt executed */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   
   /** Get the rackname where the attempt executed */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters for the attempt */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
   public EventType getEventType() {
     // Note that the task type can be setup/map/reduce/cleanup but the 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java Wed Jan 11 20:53:50 2012
@@ -18,16 +18,13 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record the successful completion of a task
  *
@@ -35,7 +32,14 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class TaskFinishedEvent implements HistoryEvent {
-  private TaskFinished datum = new TaskFinished();
+
+  private TaskFinished datum = null;
+
+  private TaskID taskid;
+  private long finishTime;
+  private TaskType taskType;
+  private String status;
+  private Counters counters;
   
   /**
    * Create an event to record the successful completion of a task
@@ -48,32 +52,48 @@ public class TaskFinishedEvent implement
   public TaskFinishedEvent(TaskID id, long finishTime,
                            TaskType taskType,
                            String status, Counters counters) {
-    datum.taskid = new Utf8(id.toString());
-    datum.finishTime = finishTime;
-    datum.counters = EventWriter.toAvro(counters);
-    datum.taskType = new Utf8(taskType.name());
-    datum.status = new Utf8(status);
+    this.taskid = id;
+    this.finishTime = finishTime;
+    this.taskType = taskType;
+    this.status = status;
+    this.counters = counters;
   }
   
   TaskFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (TaskFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new TaskFinished();
+      datum.taskid = new Utf8(taskid.toString());
+      datum.finishTime = finishTime;
+      datum.counters = EventWriter.toAvro(counters);
+      datum.taskType = new Utf8(taskType.name());
+      datum.status = new Utf8(status);
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (TaskFinished)oDatum;
+    this.taskid = TaskID.forName(datum.taskid.toString());
+    this.finishTime = datum.finishTime;
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.status = datum.status.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
   }
 
   /** Get task id */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return taskid; }
   /** Get the task finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get task counters */
-  public Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  public Counters getCounters() { return counters; }
   /** Get task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return taskType;
   }
   /** Get task status */
-  public String getTaskStatus() { return datum.status.toString(); }
+  public String getTaskStatus() { return status; }
   /** Get event type */
   public EventType getEventType() {
     return EventType.TASK_FINISHED;

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java Wed Jan 11 20:53:50 2012
@@ -32,6 +32,7 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Master;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -101,7 +102,7 @@ public class TokenCache {
     String delegTokenRenewer = Master.getMasterPrincipal(conf);
     if (delegTokenRenewer == null || delegTokenRenewer.length() == 0) {
       throw new IOException(
-          "Can't get JobTracker Kerberos principal for use as renewer");
+          "Can't get Master Kerberos principal for use as renewer");
     }
     boolean readFile = true;
 
@@ -112,7 +113,7 @@ public class TokenCache {
       if (readFile) {
         readFile = false;
         String binaryTokenFilename =
-          conf.get("mapreduce.job.credentials.binary");
+          conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
         if (binaryTokenFilename != null) {
           Credentials binary;
           try {
@@ -172,10 +173,14 @@ public class TokenCache {
   @InterfaceAudience.Private
   public static Token<DelegationTokenIdentifier> getDelegationToken(
       Credentials credentials, String namenode) {
+    // No fs-specific tokens are issued by this fs. It may, however, issue
+    // tokens for other filesystems, which would be keyed by that
+    // filesystem's name.
+    if (namenode == null) {
+      return null;
+    }
     return (Token<DelegationTokenIdentifier>) credentials.getToken(new Text(
         namenode));
   }
-  
+
   /**
    * load job token from a file
    * @param conf

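The hunk above replaces the raw "mapreduce.job.credentials.binary" string with the MRJobConfig constant. A hedged example of setting that key from job-submission code; the helper name and the file path are placeholders:

    // Point a job at a pre-fetched binary credentials file via the
    // MRJobConfig constant rather than the raw string key.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class BinaryCredentialsSketch {
      static Configuration withBinaryTokens(Configuration conf) {
        conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY,
            "/path/to/tokens.bin");      // placeholder path
        return conf;
      }
    }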
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jan 11 20:53:50 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1229278
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1230204
 /hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
 /hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java Wed Jan 11 20:53:50 2012
@@ -130,7 +130,7 @@ public class TestTokenCache {
   private FileSystem setupMultiFs(final FileSystem singleFs,
       final String renewer, final Credentials credentials) throws Exception {
     FileSystem mockFs = mock(FileSystem.class);
-    when(mockFs.getCanonicalServiceName()).thenReturn("multifs");
+    when(mockFs.getCanonicalServiceName()).thenReturn(null);
     when(mockFs.getUri()).thenReturn(new URI("multifs:///"));
 
     when(mockFs.getDelegationTokens(any(String.class))).thenThrow(

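With getCanonicalServiceName() stubbed to null, the new TokenCache guard means the multi-fs wrapper contributes no token under its own name. An illustrative JUnit check of that behavior (not part of this patch):

    import static org.junit.Assert.assertNull;

    import org.apache.hadoop.mapreduce.security.TokenCache;
    import org.apache.hadoop.security.Credentials;
    import org.junit.Test;

    public class NullServiceTokenSketch {
      @Test
      public void nullServiceYieldsNoToken() {
        // A null service key now short-circuits to null instead of
        // performing a credentials lookup.
        assertNull(TokenCache.getDelegationToken(new Credentials(), null));
      }
    }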
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Wed Jan 11 20:53:50 2012
@@ -32,13 +32,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobACLsManager;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -89,7 +89,7 @@ public class CompletedJob implements org
     
     loadFullHistoryData(loadTasks, historyFile);
     user = userName;
-    counters = TypeConverter.toYarn(jobInfo.getTotalCounters());
+    counters = jobInfo.getTotalCounters();
     diagnostics.add(jobInfo.getErrorInfo());
     report =
         RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
@@ -121,7 +121,7 @@ public class CompletedJob implements org
   }
 
   @Override
-  public Counters getCounters() {
+  public Counters getAllCounters() {
     return counters;
   }
 

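CompletedJob now holds org.apache.hadoop.mapreduce.Counters internally and converts to the YARN record type only where a protocol response is built. A hedged sketch of that boundary conversion, mirroring what the following hunks do at report-building time; the class and method names are illustrative:

    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.TypeConverter;

    public class CounterBoundarySketch {
      // Keep framework counters internally; produce the YARN record type
      // only at the RPC/report layer.
      static org.apache.hadoop.mapreduce.v2.api.records.Counters toProtocol(
          Counters counters) {
        return TypeConverter.toYarn(counters);
      }
    }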
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java Wed Jan 11 20:53:50 2012
@@ -24,10 +24,10 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
@@ -60,7 +60,7 @@ public class CompletedTask implements Ta
     this.finishTime = taskInfo.getFinishTime();
     this.type = TypeConverter.toYarn(taskInfo.getTaskType());
     if (taskInfo.getCounters() != null)
-      this.counters = TypeConverter.toYarn(taskInfo.getCounters());
+      this.counters = taskInfo.getCounters();
     if (taskInfo.getTaskStatus() != null) {
       this.state = TaskState.valueOf(taskInfo.getTaskStatus());
     } else {
@@ -86,7 +86,7 @@ public class CompletedTask implements Ta
     report.setFinishTime(finishTime);
     report.setTaskState(state);
     report.setProgress(getProgress());
-    report.setCounters(getCounters());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
     report.addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java Wed Jan 11 20:53:50 2012
@@ -21,9 +21,9 @@ package org.apache.hadoop.mapreduce.v2.h
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
@@ -46,8 +46,9 @@ public class CompletedTaskAttempt implem
   CompletedTaskAttempt(TaskId taskId, TaskAttemptInfo attemptInfo) {
     this.attemptInfo = attemptInfo;
     this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
-    if (attemptInfo.getCounters() != null)
-      this.counters = TypeConverter.toYarn(attemptInfo.getCounters());
+    if (attemptInfo.getCounters() != null) {
+      this.counters = attemptInfo.getCounters();
+    }
     if (attemptInfo.getTaskStatus() != null) {
       this.state = TaskAttemptState.valueOf(attemptInfo.getTaskStatus());
     } else {
@@ -61,7 +62,6 @@ public class CompletedTaskAttempt implem
     }
     
     report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptReport.class);
-    report.setCounters(counters);
     
     report.setTaskAttemptId(attemptId);
     report.setTaskAttemptState(state);
@@ -78,7 +78,7 @@ public class CompletedTaskAttempt implem
     }
 //    report.setPhase(attemptInfo.get); //TODO
     report.setStateString(attemptInfo.getState());
-    report.setCounters(getCounters());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
     report.setContainerId(attemptInfo.getContainerId());
     if (attemptInfo.getHostname() == null) {
       report.setNodeManagerHost("UNKNOWN");

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Wed Jan 11 20:53:50 2012
@@ -33,11 +33,15 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -67,13 +71,17 @@ import org.apache.hadoop.mapreduce.v2.hs
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.DelegationToken;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.service.AbstractService;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 
@@ -91,11 +99,14 @@ public class HistoryClientService extend
   private WebApp webApp;
   private InetSocketAddress bindAddress;
   private HistoryContext history;
-
-  public HistoryClientService(HistoryContext history) {
+  private JHSDelegationTokenSecretManager jhsDTSecretManager;
+  
+  public HistoryClientService(HistoryContext history,
+      JHSDelegationTokenSecretManager jhsDTSecretManager) {
     super("HistoryClientService");
     this.history = history;
     this.protocolHandler = new MRClientProtocolHandler();
+    this.jhsDTSecretManager = jhsDTSecretManager;
   }
 
   public void start() {
@@ -109,14 +120,15 @@ public class HistoryClientService extend
       JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
     InetAddress hostNameResolved = null;
     try {
-      hostNameResolved = InetAddress.getLocalHost(); //address.getAddress().getLocalHost();
+      hostNameResolved = InetAddress.getLocalHost();
     } catch (UnknownHostException e) {
       throw new YarnException(e);
     }
 
     server =
         rpc.getServer(MRClientProtocol.class, protocolHandler, address,
-            conf, null,
+            conf, jhsDTSecretManager,
             conf.getInt(JHAdminConfig.MR_HISTORY_CLIENT_THREAD_COUNT,
                 JHAdminConfig.DEFAULT_MR_HISTORY_CLIENT_THREAD_COUNT));
 
@@ -190,7 +202,7 @@ public class HistoryClientService extend
       JobId jobId = request.getJobId();
       Job job = verifyAndGetJob(jobId);
       GetCountersResponse response = recordFactory.newRecordInstance(GetCountersResponse.class);
-      response.setCounters(job.getCounters());
+      response.setCounters(TypeConverter.toYarn(job.getAllCounters()));
       return response;
     }
 
@@ -277,6 +289,38 @@ public class HistoryClientService extend
       }
       return response;
     }
+    
+    @Override
+    public GetDelegationTokenResponse getDelegationToken(
+        GetDelegationTokenRequest request) throws YarnRemoteException {
+
+      try {
+        // Verify that the connection is Kerberos authenticated
+        AuthenticationMethod authMethod = UserGroupInformation
+            .getRealAuthenticationMethod(UserGroupInformation.getCurrentUser());
+        if (UserGroupInformation.isSecurityEnabled()
+            && (authMethod != AuthenticationMethod.KERBEROS)) {
+          throw new IOException(
+              "Delegation Token can be issued only with kerberos authentication");
+        }
+
+        GetDelegationTokenResponse response = recordFactory.newRecordInstance(
+            GetDelegationTokenResponse.class);
+        MRDelegationTokenIdentifier tokenIdentifier =
+            new MRDelegationTokenIdentifier();
+        Token<MRDelegationTokenIdentifier> realJHSToken =
+            new Token<MRDelegationTokenIdentifier>(tokenIdentifier,
+                jhsDTSecretManager);
+        DelegationToken mrDToken = BuilderUtils.newDelegationToken(
+            realJHSToken.getIdentifier(), realJHSToken.getKind().toString(),
+            realJHSToken.getPassword(), bindAddress.getAddress().getHostAddress()
+                + ":" + bindAddress.getPort());
+        response.setDelegationToken(mrDToken);
+        return response;
+      } catch (IOException i) {
+        throw RPCUtil.getRemoteException(i);
+      }
+    }
 
     private void checkAccess(Job job, JobACL jobOperation)
         throws YarnRemoteException {

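On the client side, a caller reaches this handler through MRClientProtocol. A hedged sketch of requesting a history-server delegation token; obtaining the proxy is elided, and the setRenewer(...) setter is assumed from the request record's interface:

    import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
    import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
    import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
    import org.apache.hadoop.yarn.api.records.DelegationToken;
    import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

    public class JHSTokenClientSketch {
      static DelegationToken fetch(MRClientProtocol proxy, String renewer)
          throws Exception {
        GetDelegationTokenRequest request = RecordFactoryProvider
            .getRecordFactory(null)
            .newRecordInstance(GetDelegationTokenRequest.class);
        request.setRenewer(renewer);   // assumed setter on the request record
        GetDelegationTokenResponse response = proxy.getDelegationToken(request);
        return response.getDelegationToken();
      }
    }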
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java Wed Jan 11 20:53:50 2012
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.StringUtils;
@@ -41,6 +42,7 @@ public class JobHistoryServer extends Co
   private HistoryContext historyContext;
   private HistoryClientService clientService;
   private JobHistory jobHistoryService;
+  private JHSDelegationTokenSecretManager jhsDTSecretManager;
 
   public JobHistoryServer() {
     super(JobHistoryServer.class.getName());
@@ -56,17 +58,52 @@ public class JobHistoryServer extends Co
     }
     jobHistoryService = new JobHistory();
     historyContext = (HistoryContext)jobHistoryService;
-    clientService = new HistoryClientService(historyContext);
+    this.jhsDTSecretManager = createJHSSecretManager(conf);
+    clientService = new HistoryClientService(historyContext, 
+        this.jhsDTSecretManager);
     addService(jobHistoryService);
     addService(clientService);
     super.init(config);
   }
 
+  protected JHSDelegationTokenSecretManager createJHSSecretManager(
+      Configuration conf) {
+    long secretKeyInterval =
+        conf.getLong(MRConfig.DELEGATION_KEY_UPDATE_INTERVAL_KEY,
+            MRConfig.DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT);
+    long tokenMaxLifetime =
+        conf.getLong(MRConfig.DELEGATION_TOKEN_MAX_LIFETIME_KEY,
+            MRConfig.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT);
+    long tokenRenewInterval =
+        conf.getLong(MRConfig.DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
+            MRConfig.DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
+    // 3600000 ms: expired-token remover scan interval (one hour)
+    return new JHSDelegationTokenSecretManager(secretKeyInterval,
+        tokenMaxLifetime, tokenRenewInterval, 3600000);
+  }
+  
   protected void doSecureLogin(Configuration conf) throws IOException {
     SecurityUtil.login(conf, JHAdminConfig.MR_HISTORY_KEYTAB,
         JHAdminConfig.MR_HISTORY_PRINCIPAL);
   }
 
+  @Override
+  public void start() {
+    try {
+      jhsDTSecretManager.startThreads();
+    } catch(IOException io) {
+      LOG.error("Error while starting the Secret Manager threads", io);
+      throw new RuntimeException(io);
+    }
+    super.start();
+  }
+  
+  @Override
+  public void stop() {
+    jhsDTSecretManager.stopThreads();
+    super.stop();
+  }
+  
   public static void main(String[] args) {
     StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
     try {

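createJHSSecretManager() reads three delegation-token intervals from MRConfig, plus the hard-coded one-hour remover scan above. A hedged example of tuning those keys; the values are illustrative, not the shipped defaults:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRConfig;

    public class JHSSecretManagerConfigSketch {
      static Configuration tuned() {
        Configuration conf = new Configuration();
        // Illustrative values: rotate master keys daily, let tokens live
        // a week, require renewal daily.
        conf.setLong(MRConfig.DELEGATION_KEY_UPDATE_INTERVAL_KEY,
            24L * 60 * 60 * 1000);
        conf.setLong(MRConfig.DELEGATION_TOKEN_MAX_LIFETIME_KEY,
            7L * 24 * 60 * 60 * 1000);
        conf.setLong(MRConfig.DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
            24L * 60 * 60 * 1000);
        return conf;
      }
    }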
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java Wed Jan 11 20:53:50 2012
@@ -22,9 +22,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -95,7 +95,7 @@ public class PartialJob implements org.a
   }
 
   @Override
-  public Counters getCounters() {
+  public Counters getAllCounters() {
     return null;
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java Wed Jan 11 20:53:50 2012
@@ -229,7 +229,7 @@ public class HsWebServices {
   }
 
   @GET
-  @Path("/mapreduce/jobs/{jobid}/attempts")
+  @Path("/mapreduce/jobs/{jobid}/jobattempts")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
 

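Clients of the history server's REST API must move from .../attempts to .../jobattempts. A hedged sketch of a GET against the renamed resource, assuming the service's /ws/v1/history root and the default web port; the host and job id are placeholders:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class JobAttemptsFetchSketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://historyserver.example.com:19888/ws/v1/history"
            + "/mapreduce/jobs/job_1326232085508_0001/jobattempts");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()));
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line);    // prints the jobAttempts document
        }
        in.close();
      }
    }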
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java Wed Jan 11 20:53:50 2012
@@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
-@XmlRootElement(name = "amAttempt")
+@XmlRootElement(name = "jobAttempt")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class AMAttemptInfo {
 
@@ -52,12 +52,14 @@ public class AMAttemptInfo {
     this.nodeHttpAddress = "";
     this.nodeId = "";
     String nmHost = amInfo.getNodeManagerHost();
-    int nmPort = amInfo.getNodeManagerHttpPort();
+    int nmHttpPort = amInfo.getNodeManagerHttpPort();
+    int nmPort = amInfo.getNodeManagerPort();
     if (nmHost != null) {
-      this.nodeHttpAddress = nmHost + ":" + nmPort;
+      this.nodeHttpAddress = nmHost + ":" + nmHttpPort;
       NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
       this.nodeId = nodeId.toString();
     }
+
     this.id = amInfo.getAppAttemptId().getAttemptId();
     this.startTime = amInfo.getStartTime();
     this.containerId = "";

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java?rev=1230248&r1=1230247&r2=1230248&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java Wed Jan 11 20:53:50 2012
@@ -21,12 +21,14 @@ import java.util.ArrayList;
 
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
-@XmlRootElement(name = "attempts")
+@XmlRootElement(name = "jobAttempts")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class AMAttemptsInfo {
 
+  @XmlElement(name = "jobAttempt")
   protected ArrayList<AMAttemptInfo> attempt = new ArrayList<AMAttemptInfo>();
 
   public AMAttemptsInfo() {

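The pairing in the two hunks above, a plural root element plus an explicit @XmlElement name on the collection field, is what makes the XML serialize as <jobAttempts> wrapping repeated <jobAttempt> entries. A minimal sketch of the idiom with stand-in names:

    import java.util.ArrayList;

    import javax.xml.bind.annotation.XmlAccessType;
    import javax.xml.bind.annotation.XmlAccessorType;
    import javax.xml.bind.annotation.XmlElement;
    import javax.xml.bind.annotation.XmlRootElement;

    // Serializes as <items><item>...</item>...</items>; the names are
    // illustrative stand-ins for jobAttempts/jobAttempt.
    @XmlRootElement(name = "items")
    @XmlAccessorType(XmlAccessType.FIELD)
    public class ItemsInfo {
      @XmlElement(name = "item")
      protected ArrayList<String> item = new ArrayList<String>();
    }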

