hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vino...@apache.org
Subject svn commit: r920250 - in /hadoop/mapreduce/trunk: ./ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/mapreduce/jobhistory/ src/test/mapred/org/apache/hadoop/mapred/ src/tools/org/apache/hadoop/tools/rumen/ src/webapps/job/
Date Mon, 08 Mar 2010 09:43:18 GMT
Author: vinodkv
Date: Mon Mar  8 09:43:18 2010
New Revision: 920250

URL: http://svn.apache.org/viewvc?rev=920250&view=rev
Log:
MAPREDUCE-1493. Authorization for job-history pages. Contributed by Vinod Kumar Vavilapalli.

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobACLsManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLogServlet.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/Events.avpr
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java
    hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobdetails.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Mon Mar  8 09:43:18 2010
@@ -10,6 +10,8 @@
     MAPREDUCE-1385. Use the new UserGroupInformation from HADOOP-6299.
     (ddas via omalley)
 
+    MAPREDUCE-1493. Authorization for job-history pages. (vinodkv)
+
   NEW FEATURES
 
     MAPREDUCE-1383. Automates fetching of delegation tokens in File*Formats

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java Mon Mar  8 09:43:18 2010
@@ -34,32 +34,26 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.StringUtils;
 
 class JSPUtil {
   static final String PRIVATE_ACTIONS_KEY = "webinterface.private.actions";
-  
-  // This conf is not from jobtracker, not from tasktracker. So may not
-  // contain PRIVATE_ACTIONS_KEY set to true even if we set in conf object used
-  // by jobtracker. So use this conf with caution.
-  public static final Configuration conf = new Configuration();
 
   //LRU based cache
   private static final Map<String, JobInfo> jobHistoryCache = 
-    new LinkedHashMap<String, JobInfo>(); 
-
-  private static final int CACHE_SIZE = 
-    conf.getInt(JTConfig.JT_JOBHISTORY_CACHE_SIZE, 5);
+    new LinkedHashMap<String, JobInfo>();
 
   private static final Log LOG = LogFactory.getLog(JSPUtil.class);
 
@@ -399,8 +393,7 @@
         sb.append("<tr>");
         sb.append(
             "<td id=\"job_" + rowId + "\">" + 
-              "<a href=\"jobdetailshistory.jsp?jobid=" + 
-              status.getJobId() + "&logFile=" + 
+              "<a href=\"jobdetailshistory.jsp?logFile=" + 
               URLEncoder.encode(status.getHistoryFile().toString(), 
                 "UTF-8") + "\">" + 
               status.getJobId() + "</a></td>" +
@@ -443,8 +436,7 @@
       StringBuilder sb = new StringBuilder();
       sb.append("<retired_job rowid=\"" + rowId + "\" jobid=\"" + status.getJobId() + "\">");
       sb.append("<jobid>" + status.getJobId() + "</jobid>");
-      sb.append("<history_url>jobdetailshistory.jsp?jobid=" + status.getJobId()
-          + "&amp;logFile="
+      sb.append("<history_url>jobdetailshistory.jsp?logFile="
           + URLEncoder.encode(status.getHistoryFile().toString(), "UTF-8")
           + "</history_url>");
       sb.append("<priority>" + status.getJobPriority().toString()
@@ -472,12 +464,30 @@
     return conf.getBoolean(PRIVATE_ACTIONS_KEY, false);
   }
 
-  static JobInfo getJobInfo(HttpServletRequest request, FileSystem fs) 
-      throws IOException {
-    String jobid = request.getParameter("jobid");
-    String logFile = request.getParameter("logFile");
+  static Path getJobConfFilePath(Path logFile) {
+    Path logDir = logFile.getParent();
+    org.apache.hadoop.mapreduce.JobID jobId =
+      JobHistory.getJobIDFromHistoryFilePath(logFile);
+    return JobHistory.getConfFile(logDir, jobId);
+  }
+
+  /**
+   * Read a job-history log file and construct the corresponding {@link JobInfo}
+   * . Also cache the {@link JobInfo} for quick serving further requests.
+   * 
+   * @param logFile
+   * @param fs
+   * @param jobTracker
+   * @return JobInfo
+   * @throws IOException
+   */
+  static JobInfo getJobInfo(Path logFile, FileSystem fs,
+      JobTracker jobTracker) throws IOException {
+    String jobid =
+        JobHistory.getJobIDFromHistoryFilePath(logFile).toString();
+    JobInfo jobInfo = null;
     synchronized(jobHistoryCache) {
-      JobInfo jobInfo = jobHistoryCache.remove(jobid);
+      jobInfo = jobHistoryCache.remove(jobid);
       if (jobInfo == null) {
         JobHistoryParser parser = new JobHistoryParser(fs, logFile);
         jobInfo = parser.parse();
@@ -485,6 +495,8 @@
             jobHistoryCache.size());
       }
       jobHistoryCache.put(jobid, jobInfo);
+      int CACHE_SIZE =
+          jobTracker.conf.getInt(JTConfig.JT_JOBHISTORY_CACHE_SIZE, 5);
       if (jobHistoryCache.size() > CACHE_SIZE) {
         Iterator<Map.Entry<String, JobInfo>> it = 
           jobHistoryCache.entrySet().iterator();
@@ -492,7 +504,88 @@
         it.remove();
         LOG.info("Job History file removed form cache "+removeJobId);
       }
-      return jobInfo;
+    }
+
+    jobTracker.getJobACLsManager().checkAccess(JobID.forName(jobid),
+        UserGroupInformation.getCurrentUser(), JobACL.VIEW_JOB,
+        jobInfo.getUsername(), jobInfo.getJobACLs().get(JobACL.VIEW_JOB));
+    return jobInfo;
+  }
+
+  /**
+   * Check the access for users to view job-history pages.
+   * 
+   * @param request
+   * @param response
+   * @param jobTracker
+   * @param fs
+   * @param logFile
+   * @return the job if authorization is disabled or if the authorization checks
+   *         pass. Otherwise return null.
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws ServletException
+   */
+  static JobInfo checkAccessAndGetJobInfo(HttpServletRequest request,
+      HttpServletResponse response, final JobTracker jobTracker,
+      final FileSystem fs, final Path logFile) throws IOException,
+      InterruptedException, ServletException {
+    String jobid =
+        JobHistory.getJobIDFromHistoryFilePath(logFile).toString();
+    String user = request.getRemoteUser();
+    JobInfo job = null;
+    if (user != null) {
+      try {
+        final UserGroupInformation ugi =
+            UserGroupInformation.createRemoteUser(user);
+        job =
+            ugi.doAs(new PrivilegedExceptionAction<JobHistoryParser.JobInfo>() {
+              public JobInfo run() throws IOException {
+                // checks job view permission
+                JobInfo jobInfo = JSPUtil.getJobInfo(logFile, fs, jobTracker);
+                return jobInfo;
+              }
+            });
+      } catch (AccessControlException e) {
+        String errMsg =
+            String.format(
+                "User %s failed to view %s!<br><br>%s"
+                    + "<hr>"
+                    + "<a href=\"jobhistory.jsp\">Go back to JobHistory</a><br>"
+                    + "<a href=\"jobtracker.jsp\">Go back to JobTracker</a>",
+                user, jobid, e.getMessage());
+        JSPUtil.setErrorAndForward(errMsg, request, response);
+        return null;
+      }
+    } else {
+      // no authorization needed
+      job = JSPUtil.getJobInfo(logFile, fs, jobTracker);
+    }
+    return job;
+  }
+
+  /**
+   * Nicely print the Job-ACLs
+   * @param tracker
+   * @param jobAcls
+   * @param out
+   * @throws IOException
+   */
+  static void printJobACLs(JobTracker tracker,
+      Map<JobACL, AccessControlList> jobAcls, JspWriter out)
+      throws IOException {
+    if (tracker.isJobLevelAuthorizationEnabled()) {
+      // Display job-view-acls and job-modify-acls configured for this job
+      out.print("<b>Job-ACLs:</b><br>");
+      for (JobACL aclName : JobACL.values()) {
+        String aclConfigName = aclName.getAclName();
+        AccessControlList aclConfigured = jobAcls.get(aclName);
+        if (aclConfigured != null) {
+          String aclStr = aclConfigured.toString();
+          out.print("&nbsp;&nbsp;&nbsp;&nbsp;" + aclConfigName + ": "
+              + HtmlQuoting.quoteHtmlChars(aclStr) + "<br>");
+        }
+      }
     }
   }
 }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobACLsManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobACLsManager.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobACLsManager.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobACLsManager.java Mon Mar  8 09:43:18 2010
@@ -46,9 +46,9 @@
    * @return JobACL to AccessControlList map.
    */
   Map<JobACL, AccessControlList> constructJobACLs(JobConf conf) {
-    
+
     Map<JobACL, AccessControlList> acls =
-      new HashMap<JobACL, AccessControlList>();
+        new HashMap<JobACL, AccessControlList>();
 
     // Don't construct anything if authorization is disabled.
     if (!isJobLevelAuthorizationEnabled()) {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Mon Mar  8 09:43:18 2010
@@ -708,8 +708,9 @@
     if (jobname == null) { jobname = ""; }
     setUpLocalizedJobConf(conf, jobId);
     jobHistory.setupEventWriter(jobId, conf);
-    JobSubmittedEvent jse = new JobSubmittedEvent(jobId, jobname, username,
-        this.startTime, jobFile.toString());
+    JobSubmittedEvent jse =
+        new JobSubmittedEvent(jobId, jobname, username, this.startTime,
+            jobFile.toString(), status.getJobACLs());
     jobHistory.logEvent(jse, jobId);
     
   }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLogServlet.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLogServlet.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLogServlet.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLogServlet.java Mon Mar  8 09:43:18 2010
@@ -232,7 +232,7 @@
                      TaskLog.LogName.STDOUT, isCleanup);
         printTaskLog(response, out, attemptId, start, end, plainText, 
                      TaskLog.LogName.STDERR, isCleanup);
-        printTaskLog(response, out, attemptId, start, end, plainText, 
+        printTaskLog(response, out, attemptId, start, end, plainText,
                      TaskLog.LogName.SYSLOG, isCleanup);
         if (haveTaskLog(attemptId, TaskLog.LogName.DEBUGOUT)) {
           printTaskLog(response, out, attemptId, start, end, plainText, 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/Events.avpr
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/Events.avpr?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/Events.avpr (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/Events.avpr Mon Mar  8 09:43:18 2010
@@ -74,7 +74,11 @@
           {"name": "jobName", "type": "string"},
           {"name": "userName", "type": "string"},
           {"name": "submitTime", "type": "long"},
-          {"name": "jobConfPath", "type": "string"}
+          {"name": "jobConfPath", "type": "string"},
+          {"name": "acls", "type": {"type": "map",
+                                    "values": "string"
+                                    }
+          }
       ]
      },
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java Mon Mar  8 09:43:18 2010
@@ -212,6 +212,34 @@
   }
 
   /**
+   * Get the JobID from the history file's name. See its companion method
+   * {@link #getJobHistoryFile(Path, JobID, String)} for how history file's name
+   * is constructed from a given JobID and userName.
+   * 
+   * @param jobHistoryFilePath
+   * @return jobID
+   */
+  public static JobID getJobIDFromHistoryFilePath(Path jobHistoryFilePath) {
+    String[] jobDetails = jobHistoryFilePath.getName().split("_");
+    String jobId =
+        jobDetails[0] + "_" + jobDetails[1] + "_" + jobDetails[2];
+    return JobID.forName(jobId);
+  }
+
+  /**
+   * Get the user name of the job-submitter from the history file's name. See
+   * its companion method {@link #getJobHistoryFile(Path, JobID, String)} for
+   * how history file's name is constructed from a given JobID and username.
+   * 
+   * @param jobHistoryFilePath
+   * @return the user-name
+   */
+  public static String getUserFromHistoryFilePath(Path jobHistoryFilePath) {
+    String[] jobDetails = jobHistoryFilePath.getName().split("_");
+    return jobDetails[3];
+  }
+
+  /**
    * Given the job id, return the history file path from the cache
    */
   public String getHistoryFilePath(JobID jobId) {
@@ -253,7 +281,7 @@
   
     /* Storing the job conf on the log dir */
   
-    Path logDirConfPath = getConfFile(jobId);
+    Path logDirConfPath = getConfFile(logDir, jobId);
     LOG.info("LogDirConfPath is " + logDirConfPath);
   
     FSDataOutputStream jobFileOut = null;
@@ -326,7 +354,14 @@
         TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>());
   }
 
-  Path getConfFile(JobID jobId) {
+  /**
+   * Get the job conf file for the given jobId
+   * 
+   * @param logDir
+   * @param jobId
+   * @return the jobconf.xml path
+   */
+  public static Path getConfFile(Path logDir, JobID jobId) {
     Path jobFilePath = null;
     if (logDir != null) {
       jobFilePath = new Path(logDir + File.separator +

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Mon Mar  8 09:43:18 2010
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapred.JobStatus;
@@ -33,6 +34,7 @@
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapred.TaskStatus;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 /**
  * Default Parser for the JobHistory files. Typical usage is
@@ -307,6 +309,7 @@
     info.username = event.getUserName();
     info.submitTime = event.getSubmitTime();
     info.jobConfPath = event.getJobConfPath();
+    info.jobACLs = event.getJobAcls();
   }
 
   /**
@@ -331,6 +334,7 @@
     Counters mapCounters;
     Counters reduceCounters;
     JobPriority priority;
+    Map<JobACL, AccessControlList> jobACLs;
     
     Map<TaskID, TaskInfo> tasksMap;
     
@@ -343,6 +347,7 @@
       finishedMaps = finishedReduces = 0;
       username = jobname = jobConfPath = "";
       tasksMap = new HashMap<TaskID, TaskInfo>();
+      jobACLs = new HashMap<JobACL, AccessControlList>();
     }
     
     /** Print all the job information */
@@ -402,6 +407,7 @@
     public Map<TaskID, TaskInfo> getAllTasks() { return tasksMap; }
     /** Get the priority of this job */
     public String getPriority() { return priority.toString(); }
+    public Map<JobACL, AccessControlList> getJobACLs() { return jobACLs; }
   }
   
   /**

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java Mon Mar  8 09:43:18 2010
@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
 
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 import org.apache.avro.util.Utf8;
 
@@ -32,20 +36,40 @@
   private JobSubmitted datum = new JobSubmitted();
 
   /**
+   * @deprecated Use
+   *             {@link #JobSubmittedEvent(JobID, String, String, long, String, Map)}
+   *             instead.
+   */
+  @Deprecated
+  public JobSubmittedEvent(JobID id, String jobName, String userName,
+      long submitTime, String jobConfPath) {
+    this(id, jobName, userName, submitTime, jobConfPath,
+        new HashMap<JobACL, AccessControlList>());
+  }
+
+  /**
    * Create an event to record job submission
    * @param id The job Id of the job
    * @param jobName Name of the job
    * @param userName Name of the user who submitted the job
    * @param submitTime Time of submission
    * @param jobConfPath Path of the Job Configuration file
+   * @param jobACLs The configured acls for the job.
    */
   public JobSubmittedEvent(JobID id, String jobName, String userName,
-      long submitTime, String jobConfPath) {
+      long submitTime, String jobConfPath,
+      Map<JobACL, AccessControlList> jobACLs) {
     datum.jobid = new Utf8(id.toString());
     datum.jobName = new Utf8(jobName);
     datum.userName = new Utf8(userName);
     datum.submitTime = submitTime;
     datum.jobConfPath = new Utf8(jobConfPath);
+    Map<Utf8, Utf8> jobAcls = new HashMap<Utf8, Utf8>();
+    for (Entry<JobACL, AccessControlList> entry : jobACLs.entrySet()) {
+      jobAcls.put(new Utf8(entry.getKey().getAclName()), new Utf8(
+          entry.getValue().toString()));
+    }
+    datum.acls = jobAcls;
   }
 
   JobSubmittedEvent() {}
@@ -65,6 +89,19 @@
   public long getSubmitTime() { return datum.submitTime; }
   /** Get the Path for the Job Configuration file */
   public String getJobConfPath() { return datum.jobConfPath.toString(); }
+  /** Get the acls configured for the job **/
+  public Map<JobACL, AccessControlList> getJobAcls() {
+    Map<JobACL, AccessControlList> jobAcls =
+        new HashMap<JobACL, AccessControlList>();
+    for (JobACL jobACL : JobACL.values()) {
+      Utf8 jobACLsUtf8 = new Utf8(jobACL.getAclName());
+      if (datum.acls.containsKey(jobACLsUtf8)) {
+        jobAcls.put(jobACL, new AccessControlList(datum.acls.get(
+            jobACLsUtf8).toString()));
+      }
+    }
+    return jobAcls;
+  }
   /** Get the event type */
   public EventType getEventType() { return EventType.JOB_SUBMITTED; }
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java Mon Mar  8 09:43:18 2010
@@ -43,6 +43,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskID;
@@ -55,6 +56,7 @@
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 /**
  *
@@ -938,8 +940,10 @@
       JobHistory jh = jt.getJobHistory();
       final JobID jobId = JobID.forName("job_200809171136_0001");
       jh.setupEventWriter(jobId, conf);
+      Map<JobACL, AccessControlList> jobACLs =
+          new HashMap<JobACL, AccessControlList>();
       JobSubmittedEvent jse =
-        new JobSubmittedEvent(jobId, "job", "user", 12345, "path");
+        new JobSubmittedEvent(jobId, "job", "user", 12345, "path", jobACLs);
       jh.logEvent(jse, jobId);
       jh.closeWriter(jobId);
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java Mon Mar  8 09:43:18 2010
@@ -18,12 +18,15 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
@@ -31,6 +34,7 @@
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 /**
  * Unit test to test if the JobHistory writer/parser is able to handle
@@ -70,8 +74,17 @@
     jh.init(jt, conf, "localhost", 1234);
     JobID jobId = JobID.forName("job_200809171136_0001");
     jh.setupEventWriter(jobId, conf);
+    Map<JobACL, AccessControlList> jobACLs =
+        new HashMap<JobACL, AccessControlList>();
+    AccessControlList viewJobACL =
+        new AccessControlList("user1,user2 group1,group2");
+    AccessControlList modifyJobACL =
+        new AccessControlList("user3,user4 group3, group4");
+    jobACLs.put(JobACL.VIEW_JOB, viewJobACL);
+    jobACLs.put(JobACL.MODIFY_JOB, modifyJobACL);
     JobSubmittedEvent jse =
-      new JobSubmittedEvent(jobId, weirdJob, username, 12345, weirdPath);
+        new JobSubmittedEvent(jobId, weirdJob, username, 12345, weirdPath,
+            jobACLs);
     jh.logEvent(jse, jobId);
 
     JobFinishedEvent jfe =
@@ -109,6 +122,12 @@
     assertTrue (jobInfo.getUsername().equals(username));
     assertTrue(jobInfo.getJobname().equals(weirdJob));
     assertTrue(jobInfo.getJobConfPath().equals(weirdPath));
+    Map<JobACL, AccessControlList> parsedACLs = jobInfo.getJobACLs();
+    assertEquals(2, parsedACLs.size());
+    assertTrue(parsedACLs.get(JobACL.VIEW_JOB).toString().equals(
+        viewJobACL.toString()));
+    assertTrue(parsedACLs.get(JobACL.MODIFY_JOB).toString().equals(
+        modifyJobACL.toString()));
 
     if (mr != null) {
       mr.shutdown();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java Mon Mar  8 09:43:18 2010
@@ -26,11 +26,18 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.TestHttpServer.DummyFilterInitializer;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -109,21 +116,28 @@
    *     cannot view the job
    * (5) other unauthorized users cannot view the job
    */
-  private void validateViewJob(String url, String method) throws IOException {
-    assertEquals(HttpURLConnection.HTTP_OK,
-        getHttpStatusCode(url, jobSubmitter, method));
-    assertEquals(HttpURLConnection.HTTP_OK,
-        getHttpStatusCode(url, superGroupMember, method));
-    assertEquals(HttpURLConnection.HTTP_OK,
-        getHttpStatusCode(url, mrOwner, method));
-    assertEquals(HttpURLConnection.HTTP_OK,
-        getHttpStatusCode(url, viewColleague, method));
-    assertEquals(HttpURLConnection.HTTP_OK,
-        getHttpStatusCode(url, viewAndModifyColleague, method));
-    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED,
-        getHttpStatusCode(url, modifyColleague, method));
-    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED,
-        getHttpStatusCode(url, unauthorizedUser, method));
+  private void validateViewJob(String url, String method)
+      throws IOException {
+    assertEquals("Incorrect return code for " + jobSubmitter,
+        HttpURLConnection.HTTP_OK, getHttpStatusCode(url, jobSubmitter,
+            method));
+    assertEquals("Incorrect return code for " + superGroupMember,
+        HttpURLConnection.HTTP_OK, getHttpStatusCode(url, superGroupMember,
+            method));
+    assertEquals("Incorrect return code for " + mrOwner,
+        HttpURLConnection.HTTP_OK, getHttpStatusCode(url, mrOwner, method));
+    assertEquals("Incorrect return code for " + viewColleague,
+        HttpURLConnection.HTTP_OK, getHttpStatusCode(url, viewColleague,
+            method));
+    assertEquals("Incorrect return code for " + viewAndModifyColleague,
+        HttpURLConnection.HTTP_OK, getHttpStatusCode(url,
+            viewAndModifyColleague, method));
+    assertEquals("Incorrect return code for " + modifyColleague,
+        HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(url,
+            modifyColleague, method));
+    assertEquals("Incorrect return code for " + unauthorizedUser,
+        HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(url,
+            unauthorizedUser, method));
   }
 
   /**
@@ -216,6 +230,125 @@
     }
   }
 
+  public void testAuthorizationForJobHistoryPages() throws Exception {
+    JobConf conf = new JobConf();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    Properties props = new Properties();
+    props.setProperty("hadoop.http.filter.initializers",
+        DummyFilterInitializer.class.getName());
+    props.setProperty(MRConfig.JOB_LEVEL_AUTHORIZATION_ENABLING_FLAG,
+        String.valueOf(true));
+    props.setProperty("dfs.permissions.enabled", "false");
+
+    props.setProperty("mapreduce.job.committer.setup.cleanup.needed",
+        "false");
+    props.setProperty(MRConfig.MR_SUPERGROUP, "superGroup");
+
+    MyGroupsProvider.mapping.put(jobSubmitter, Arrays.asList("group1"));
+    MyGroupsProvider.mapping.put(viewColleague, Arrays.asList("group2"));
+    MyGroupsProvider.mapping.put(modifyColleague, Arrays.asList("group1"));
+    MyGroupsProvider.mapping.put(unauthorizedUser, Arrays.asList("evilSociety"));
+    MyGroupsProvider.mapping.put(superGroupMember, Arrays.asList("superGroup"));
+    MyGroupsProvider.mapping.put(viewAndModifyColleague, Arrays.asList("group3"));
+    mrOwner = UserGroupInformation.getCurrentUser().getShortUserName();
+    MyGroupsProvider.mapping.put(mrOwner, Arrays.asList(
+        new String[] { "group4", "group5" }));
+
+    startCluster(true, props);
+    MiniMRCluster cluster = getMRCluster();
+    int infoPort = cluster.getJobTrackerRunner().getJobTrackerInfoPort();
+
+    conf = new JobConf(cluster.createJobConf());
+    conf.set(JobContext.JOB_ACL_VIEW_JOB, viewColleague + " group3");
+
+    // Let us add group1 and group3 to modify-job-acl. So modifyColleague and
+    // viewAndModifyColleague will be able to modify the job
+    conf.set(JobContext.JOB_ACL_MODIFY_JOB, " group1,group3");
+
+    final SleepJob sleepJob = new SleepJob();
+    sleepJob.setConf(conf);
+    UserGroupInformation jobSubmitterUGI =
+        UserGroupInformation.createRemoteUser(jobSubmitter);
+    Job job = jobSubmitterUGI.doAs(new PrivilegedExceptionAction<Job>() {
+      public Job run() throws Exception {
+        // Very large sleep job.
+        Job job = sleepJob.createJob(1, 0, 1000, 1, 0, 0);
+        job.waitForCompletion(true);
+        return job;
+      }
+    });
+
+    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+
+    String historyFileName = job.getStatus().getHistoryFile();
+    String jtURL = "http://localhost:" + infoPort;
+
+    // Job will automatically be retired. Now test jsps..
+
+    // validate access of jobdetails_history.jsp
+    String jobDetailsJSP =
+        jtURL + "/jobdetailshistory.jsp?logFile=" + historyFileName;
+    validateViewJob(jobDetailsJSP, "GET");
+
+    // validate accesses of jobtaskshistory.jsp
+    String jobTasksJSP =
+        jtURL + "/jobtaskshistory.jsp?logFile=" + historyFileName;
+    String[] taskTypes =
+        new String[] { "JOB_SETUP", "MAP", "REDUCE", "JOB_CLEANUP" };
+    String[] states =
+        new String[] { "all", "SUCCEEDED", "FAILED", "KILLED" };
+    for (String taskType : taskTypes) {
+      for (String state : states) {
+        validateViewJob(jobTasksJSP + "&taskType=" + taskType + "&status="
+            + state, "GET");
+      }
+    }
+
+    JobHistoryParser parser =
+        new JobHistoryParser(new Path(historyFileName).getFileSystem(conf),
+            historyFileName);
+    JobInfo jobInfo = parser.parse();
+    Map<TaskID, TaskInfo> tipsMap = jobInfo.getAllTasks();
+    for (TaskID tip : tipsMap.keySet()) {
+      // validate access of taskdetailshistory.jsp
+      validateViewJob(jtURL + "/taskdetailshistory.jsp?logFile="
+          + historyFileName + "&tipid=" + tip.toString(), "GET");
+
+      Map<TaskAttemptID, TaskAttemptInfo> attemptsMap =
+          tipsMap.get(tip).getAllTaskAttempts();
+      for (TaskAttemptID attempt : attemptsMap.keySet()) {
+
+        // validate access to taskstatshistory.jsp
+        validateViewJob(jtURL + "/taskstatshistory.jsp?attemptid="
+            + attempt.toString() + "&logFile=" + historyFileName, "GET");
+
+        // validate access to tasklogs - STDOUT and STDERR. SYSLOGs are not
+        // generated for the 1 map sleep job in the test case.
+        validateViewJob(TaskLogServlet.getTaskLogUrl("localhost",
+            Integer.toString(attemptsMap.get(attempt).getHttpPort()),
+            attempt.toString())
+            + "&filter=" + TaskLog.LogName.STDOUT, "GET");
+
+        validateViewJob(TaskLogServlet.getTaskLogUrl("localhost",
+            Integer.toString(attemptsMap.get(attempt).getHttpPort()),
+            attempt.toString())
+            + "&filter=" + TaskLog.LogName.STDERR, "GET");        
+      }
+    }
+
+    // validate access to analysejobhistory.jsp
+    String analyseJobHistoryJSP =
+        jtURL + "/analysejobhistory.jsp?logFile=" + historyFileName;
+    validateViewJob(analyseJobHistoryJSP, "GET");
+
+    // validate access of jobconf_history.jsp
+    String jobConfJSP =
+        jtURL + "/jobconf_history.jsp?logFile=" + historyFileName;
+    validateViewJob(jobConfJSP, "GET");
+  }
+
   /**
    * Starts a sleep job and tries to kill the job using jobdetails.jsp as
    * (1) viewColleague (2) unauthorizedUser (3) modifyColleague

Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java Mon Mar  8 09:43:18 2010
@@ -25,6 +25,7 @@
 
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
@@ -34,6 +35,7 @@
 import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 public class Job20LineHistoryEventEmitter extends HistoryEventEmitter {
 
@@ -78,8 +80,10 @@
 
         that.originalSubmitTime = Long.parseLong(submitTime);
 
+        Map<JobACL, AccessControlList> jobACLs =
+          new HashMap<JobACL, AccessControlList>();
         return new JobSubmittedEvent(jobID, jobName, user == null ? "nulluser"
-            : user, that.originalSubmitTime, jobConf);
+            : user, that.originalSubmitTime, jobConf, jobACLs);
       }
 
       return null;

Modified: hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp Mon Mar  8 09:43:18 2010
@@ -37,7 +37,6 @@
 %>
 <html><body>
 <%
-  String jobid = JobID.forName(request.getParameter("jobid")).toString();
   String logFile = request.getParameter("logFile");
   String numTasks = request.getParameter("numTasks");
   int showTasks = 10 ; 
@@ -45,9 +44,14 @@
     showTasks = Integer.parseInt(numTasks);  
   }
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
+  JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+  JobHistoryParser.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+      response, jobTracker, fs, new Path(logFile));
+  if (job == null) {
+    return;
+  }
 %>
-<h2>Hadoop Job <a href="jobdetailshistory.jsp?jobid=<%=jobid%>&&logFile=<%=logFile%>"><%=jobid %> </a></h2>
+<h2>Hadoop Job <a href="jobdetailshistory.jsp?logFile=<%=logFile%>"><%=job.getJobId() %> </a></h2>
 <b>User : </b> <%=HtmlQuoting.quoteHtmlChars(job.getUsername()) %><br/>
 <b>JobName : </b> <%=HtmlQuoting.quoteHtmlChars(job.getJobname()) %><br/>
 <b>JobConf : </b> <%=job.getJobConfPath() %><br/> 
@@ -125,7 +129,7 @@
 %>
 
 <h3>Time taken by best performing Map task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=minMap.getAttemptId().getTaskID()%>">
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=minMap.getAttemptId().getTaskID()%>">
 <%=minMap.getAttemptId().getTaskID() %></a> : <%=StringUtils.formatTimeDiff(minMap.getFinishTime(), minMap.getStartTime() ) %></h3>
 <h3>Average time taken by Map tasks: 
 <%=StringUtils.formatTimeDiff(avg.getAvgMapTime(), 0) %></h3>
@@ -136,7 +140,7 @@
   for (int i=0;i<showTasks && i<mapTasks.length; i++) {
 %>
     <tr>
-    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=mapTasks[i].getAttemptId().getTaskID()%>">
+    <td><a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=mapTasks[i].getAttemptId().getTaskID()%>">
         <%=mapTasks[i].getAttemptId().getTaskID() %></a></td>
     <td><%=StringUtils.formatTimeDiff(mapTasks[i].getFinishTime(), mapTasks[i].getStartTime()) %></td>
     </tr>
@@ -150,8 +154,8 @@
 %>
 
 <h3>The last Map task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
-&taskid=<%=lastMap.getAttemptId().getTaskID()%>"><%=lastMap.getAttemptId().getTaskID() %></a> 
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>
+&tipid=<%=lastMap.getAttemptId().getTaskID()%>"><%=lastMap.getAttemptId().getTaskID() %></a> 
 finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat, 
                               lastMap.getFinishTime(), 
@@ -164,8 +168,8 @@
   JobHistoryParser.TaskAttemptInfo minShuffle = reduceTasks[reduceTasks.length-1] ;
 %>
 <h3>Time taken by best performing shuffle
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
-&taskid=<%=minShuffle.getAttemptId().getTaskID()%>"><%=minShuffle.getAttemptId().getTaskID()%></a> : 
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>
+&tipid=<%=minShuffle.getAttemptId().getTaskID()%>"><%=minShuffle.getAttemptId().getTaskID()%></a> : 
 <%=StringUtils.formatTimeDiff(minShuffle.getShuffleFinishTime(),
                               minShuffle.getStartTime() ) %></h3>
 <h3>Average time taken by Shuffle: 
@@ -177,8 +181,8 @@
   for (int i=0;i<showTasks && i<reduceTasks.length; i++) {
 %>
     <tr>
-    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=
-<%=logFile%>&taskid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
+    <td><a href="taskdetailshistory.jsp?logFile=
+<%=logFile%>&tipid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
 <%=reduceTasks[i].getAttemptId().getTaskID() %></a></td>
     <td><%=
            StringUtils.formatTimeDiff(
@@ -196,8 +200,8 @@
 %>
 
 <h3>The last Shuffle  
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
-&taskid=<%=lastShuffle.getAttemptId().getTaskID()%>"><%=lastShuffle.getAttemptId().getTaskID()%>
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>
+&tipid=<%=lastShuffle.getAttemptId().getTaskID()%>"><%=lastShuffle.getAttemptId().getTaskID()%>
 </a> finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat,
                               lastShuffle.getShuffleFinishTime(),
@@ -209,7 +213,7 @@
 %>
 <hr/>
 <h3>Time taken by best performing Reduce task : 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=minReduce.getAttemptId().getTaskID()%>">
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=minReduce.getAttemptId().getTaskID()%>">
 <%=minReduce.getAttemptId().getTaskID() %></a> : 
 <%=StringUtils.formatTimeDiff(minReduce.getFinishTime(),
     minReduce.getShuffleFinishTime() ) %></h3>
@@ -223,7 +227,7 @@
   for (int i=0;i<showTasks && i<reduceTasks.length; i++) {
 %>
     <tr>
-    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
+    <td><a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
         <%=reduceTasks[i].getAttemptId().getTaskID() %></a></td>
     <td><%=StringUtils.formatTimeDiff(
              reduceTasks[i].getFinishTime(),
@@ -239,8 +243,8 @@
 %>
 
 <h3>The last Reduce task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
-&taskid=<%=lastReduce.getAttemptId().getTaskID()%>"><%=lastReduce.getAttemptId().getTaskID()%>
+<a href="taskdetailshistory.jsp?logFile=<%=logFile%>
+&tipid=<%=lastReduce.getAttemptId().getTaskID()%>"><%=lastReduce.getAttemptId().getTaskID()%>
 </a> finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat,
                               lastReduce.getFinishTime(),

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp Mon Mar  8 09:43:18 2010
@@ -26,6 +26,10 @@
   import="org.apache.hadoop.fs.*"
   import="org.apache.hadoop.util.*"
   import="org.apache.hadoop.mapreduce.jobhistory.*"
+  import="org.apache.hadoop.mapreduce.JobACL"
+  import="org.apache.hadoop.security.UserGroupInformation"
+  import="org.apache.hadoop.security.authorize.AccessControlList"
+  import="org.apache.hadoop.security.AccessControlException"
 %>
 
 <%!	private static final long serialVersionUID = 1L;
@@ -33,11 +37,16 @@
 
 <%
   JobTracker tracker = (JobTracker) application.getAttribute("job.tracker");
-  String jobId = JobID.forName(request.getParameter("jobid")).toString();
-  if (jobId == null) {
-    out.println("<h2>Missing 'jobid' for fetching job configuration!</h2>");
- 	return;
+
+  String logFileString = request.getParameter("logFile");
+  if (logFileString == null) {
+    out.println("<h2>Missing 'logFile' for fetching job configuration!</h2>");
+    return;
   }
+
+  Path logFile = new Path(logFileString);
+  String jobId = JobHistory.getJobIDFromHistoryFilePath(logFile).toString();
+
 %>
   
 <html>
@@ -48,14 +57,31 @@
 <h2>Job Configuration: JobId - <%= jobId %></h2><br>
 
 <%
-  Path logDir = new Path(request.getParameter("jobLogDir"));
-  Path jobFilePath = new Path(logDir, 
-                       request.getParameter("jobUniqueString") + "_conf.xml");
+  Path jobFilePath = JSPUtil.getJobConfFilePath(logFile);
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
   FSDataInputStream jobFile = null; 
   try {
     jobFile = fs.open(jobFilePath);
     JobConf jobConf = new JobConf(jobFilePath);
+    JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+    String user = request.getRemoteUser();
+    if (user != null) {
+      try {
+        jobTracker.getJobACLsManager().checkAccess(JobID.forName(jobId),
+            UserGroupInformation.createRemoteUser(user), JobACL.VIEW_JOB,
+            jobConf.getUser(),
+            new AccessControlList(jobConf.get(JobACL.VIEW_JOB.getAclName())));
+        } catch (AccessControlException e) {
+          String errMsg =
+            user
+                + " is not authorized to view details of job "
+                + jobId
+                + "<hr><a href=\"jobhistory.jsp\">Go back to JobHistory</a><br>";
+        JSPUtil.setErrorAndForward(errMsg, request, response);
+        return;
+        }
+    }
+
     XMLUtils.transform(
         jobConf.getConfResourceAsInputStream("webapps/static/jobconf.xsl"),
         jobFile, out);

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobdetails.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobdetails.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobdetails.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobdetails.jsp Mon Mar  8 09:43:18 2010
@@ -305,21 +305,9 @@
     out.print("<b>Job File:</b> <a href=\"jobconf.jsp?jobid=" + jobId + "\">" +
         profile.getJobFile() + "</a><br>\n");
 
-    if (tracker.isJobLevelAuthorizationEnabled()) {
-      // Display job-view-acls and job-modify-acls configured for this job
-      Map<JobACL, AccessControlList> jobAcls = status.getJobACLs();
-      out.print("<b>Job-ACLs:</b><br>");
-      for (JobACL aclName : JobACL.values()) {
-        String aclConfigName = aclName.getAclName();
-        AccessControlList aclConfigured = jobAcls.get(aclName);
-        String aclStr = "";
-        if (aclConfigured != null) {
-          aclStr = aclConfigured.toString();
-        }
-        out.print("&nbsp;&nbsp;&nbsp;&nbsp;" + aclConfigName + ": "
-                  + aclStr + "<br>");
-      }
-    }
+    Map<JobACL, AccessControlList> jobAcls = status.getJobACLs();
+    JSPUtil.printJobACLs(tracker, jobAcls, out);
+
     out.print("<b>Job Setup:</b>");
     printJobLevelTaskSummary(out, jobId, "setup", 
                              job.getTasks(TaskType.JOB_SETUP));

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp Mon Mar  8 09:43:18 2010
@@ -33,21 +33,28 @@
   import="org.apache.hadoop.util.*"
   import="java.text.*"
   import="org.apache.hadoop.mapreduce.jobhistory.*"
+  import="java.security.PrivilegedExceptionAction"
+  import="org.apache.hadoop.security.AccessControlException"
+  import="org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo"
+  import="org.apache.hadoop.mapreduce.JobACL"
+  import="org.apache.hadoop.security.authorize.AccessControlList"
 %>
 <%!private static final long serialVersionUID = 1L;
 %>
 
 <%! static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
 <%
-    String jobid = JobID.forName(request.getParameter("jobid")).toString();
     String logFile = request.getParameter("logFile");
+    final Path jobFile = new Path(logFile);
+    String jobid = JobHistory.getJobIDFromHistoryFilePath(jobFile).toString();
 
-    Path jobFile = new Path(logFile);
-    String[] jobDetails = jobFile.getName().split("_");
-    String jobUniqueString = jobid;
-
-    FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-    JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
+    final FileSystem fs = (FileSystem) application.getAttribute("fileSys");
+    final JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+    JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request, response,
+        jobTracker, fs, jobFile);
+    if (job == null) {
+      return;
+    }
 %>
 
 <html>
@@ -61,8 +68,12 @@
 
 <b>User: </b> <%=HtmlQuoting.quoteHtmlChars(job.getUsername()) %><br/>
 <b>JobName: </b> <%=HtmlQuoting.quoteHtmlChars(job.getJobname()) %><br/>
-<b>JobConf: </b> <a href="jobconf_history.jsp?jobid=<%=jobid%>&jobLogDir=<%=new Path(logFile).getParent().toString()%>&jobUniqueString=<%=jobUniqueString%>"> 
+<b>JobConf: </b> <a href="jobconf_history.jsp?logFile=<%=logFile%>"> 
                  <%=job.getJobConfPath() %></a><br/> 
+<%         
+  Map<JobACL, AccessControlList> jobAcls = job.getJobACLs();
+  JSPUtil.printJobACLs(jobTracker, jobAcls, out);
+%>
 <b>Submitted At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getSubmitTime(), 0 )  %><br/> 
 <b>Launched At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLaunchTime(), job.getSubmitTime()) %><br/>
 <b>Finished At: </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getFinishTime(), job.getLaunchTime()) %><br/>
@@ -70,7 +81,7 @@
 <%
     HistoryViewer.SummarizedJob sj = new HistoryViewer.SummarizedJob(job);
 %>
-<b><a href="analysejobhistory.jsp?jobid=<%=jobid %>&logFile=<logFile%>">Analyse This Job</a></b> 
+<b><a href="analysejobhistory.jsp?logFile=<%=logFile%>">Analyse This Job</a></b> 
 <hr/>
 <center>
 <table border="2" cellpadding="5" cellspacing="2">
@@ -79,52 +90,52 @@
 </tr>
 <tr>
 <td>Setup</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=all">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_SETUP&status=all">
         <%=sj.getTotalSetups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=SUCCEEDED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_SETUP&status=SUCCEEDED">
         <%=sj.getNumFinishedSetups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=FAILED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_SETUP&status=FAILED">
         <%=sj.getNumFailedSetups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=KILLED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_SETUP&status=KILLED">
         <%=sj.getNumKilledSetups()%></a></td>  
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupStarted(), 0) %></td>
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupFinished(), sj.getSetupStarted()) %></td>
 </tr>
 <tr>
 <td>Map</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=all">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=MAP&status=all">
         <%=sj.getTotalMaps()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=SUCCEEDED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=MAP&status=SUCCEEDED">
         <%=job.getFinishedMaps() %></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=FAILED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=MAP&status=FAILED">
         <%=sj.getNumFailedMaps()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=KILLED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=MAP&status=KILLED">
         <%=sj.getNumKilledMaps()%></a></td>
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapStarted(), 0) %></td>
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapFinished(), sj.getMapStarted()) %></td>
 </tr>
 <tr>
 <td>Reduce</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=all">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=REDUCE&status=all">
         <%=sj.getTotalReduces()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=SUCCEEDED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=REDUCE&status=SUCCEEDED">
         <%=job.getFinishedReduces()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=FAILED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=REDUCE&status=FAILED">
         <%=sj.getNumFailedReduces()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=KILLED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=REDUCE&status=KILLED">
         <%=sj.getNumKilledReduces()%></a></td>  
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceStarted(), 0) %></td>
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceFinished(), sj.getReduceStarted()) %></td>
 </tr>
 <tr>
 <td>Cleanup</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=all">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=all">
         <%=sj.getTotalCleanups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=SUCCEEDED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=SUCCEEDED">
         <%=sj.getNumFinishedCleanups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=FAILED">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=FAILED">
         <%=sj.getNumFailedCleanups()%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=KILLED>">
+    <td><a href="jobtaskshistory.jsp?logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=KILLED">
         <%=sj.getNumKilledCleanups()%></a></td>  
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupStarted(), 0) %></td>
     <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupFinished(), sj.getCleanupStarted()) %></td>
@@ -215,11 +226,11 @@
              if (firstId) {
               firstId = false;
 %>
-            <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+            <a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=tid %>"><%=tid %></a>
 <%		  
           } else {
 %>	
-            ,&nbsp<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+            ,&nbsp<a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=tid %>"><%=tid %></a>
 <%		  
           }
         }
@@ -255,11 +266,11 @@
              if (firstId) {
               firstId = false;
 %>
-            <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+            <a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=tid %>"><%=tid %></a>
 <%		  
           } else {
 %>	
-            ,&nbsp<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+            ,&nbsp<a href="taskdetailshistory.jsp?logFile=<%=logFile%>&tipid=<%=tid %>"><%=tid %></a>
 <%		  
           }
         }

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp Mon Mar  8 09:43:18 2010
@@ -233,11 +233,9 @@
     Set<String> displayedJobs = new HashSet<String>();
     for (int i = start - 1; i < start + length - 1; ++i) {
       Path jobFile = jobFiles[i];
-      
-      String[] jobDetails = jobFile.getName().split("_");
 
-      String jobId = jobDetails[0] + "_" +jobDetails[1] + "_" + jobDetails[2] ;
-      String userName = jobDetails[3];
+      String jobId = JobHistory.getJobIDFromHistoryFilePath(jobFile).toString();
+      String userName = JobHistory.getUserFromHistoryFilePath(jobFile);
 
       // Check if the job is already displayed. There can be multiple job 
       // history files for jobs that have restarted
@@ -266,8 +264,8 @@
                           String user, Path logFile, JspWriter out)
     throws IOException {
       out.print("<tr>"); 
-      out.print("<td>" + "<a href=\"jobdetailshistory.jsp?jobid=" + jobId + 
-                "&logFile=" + URLEncoder.encode(logFile.toString(), "UTF-8") +
+      out.print("<td>" + "<a href=\"jobdetailshistory.jsp?logFile=" +
+       URLEncoder.encode(logFile.toString(), "UTF-8") +
                 "\">" + HtmlQuoting.quoteHtmlChars(jobId) + "</a></td>");
       out.print("<td>" + HtmlQuoting.quoteHtmlChars(user) + "</td>");
       out.print("</tr>");

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp Mon Mar  8 09:43:18 2010
@@ -40,18 +40,22 @@
 %>
 
 <%	
-  String jobid = JobID.forName(request.getParameter("jobid")).toString();
   String logFile = request.getParameter("logFile");
-  String taskStatus = request.getParameter("status");
-  String taskType = request.getParameter("taskType");
+  String taskStatus = request.getParameter("status"); 
+  String taskType = request.getParameter("taskType"); 
   
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
+  JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+  JobHistoryParser.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+      response, jobTracker, fs, new Path(logFile));
+  if (job == null) {
+    return;
+  }
   Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks(); 
 %>
 <html>
 <body>
-<h2><%=taskStatus%> <%=taskType %> task list for <a href="jobdetailshistory.jsp?jobid=<%=jobid%>&&logFile=<%=logFile%>"><%=jobid %> </a></h2>
+<h2><%=taskStatus%> <%=taskType %> task list for <a href="jobdetailshistory.jsp?logFile=<%=logFile%>"><%=job.getJobId() %> </a></h2>
 <center>
 <table border="2" cellpadding="5" cellspacing="2">
 <tr><td>Task Id</td><td>Start Time</td><td>Finish Time<br/></td><td>Error</td></tr>
@@ -62,7 +66,7 @@
       for (JobHistoryParser.TaskAttemptInfo taskAttempt : taskAttempts.values()) {
         if (taskStatus.equals(taskAttempt.getTaskStatus()) || 
           taskStatus.equalsIgnoreCase("all")){
-          printTask(jobid, logFile, taskAttempt, out); 
+          printTask(logFile, taskAttempt, out); 
         }
       }
     }
@@ -70,11 +74,11 @@
 %>
 </table>
 <%!
-  private void printTask(String jobid, String logFile,
+  private void printTask(String logFile,
     JobHistoryParser.TaskAttemptInfo attempt, JspWriter out) throws IOException{
     out.print("<tr>"); 
-    out.print("<td>" + "<a href=\"taskdetailshistory.jsp?jobid=" + jobid + 
-          "&logFile="+ logFile +"&taskid="+attempt.getAttemptId().getTaskID().toString() +"\">" +
+    out.print("<td>" + "<a href=\"taskdetailshistory.jsp?logFile="+ logFile 
+        +"&tipid="+attempt.getAttemptId().getTaskID().toString() +"\">" +
           attempt.getAttemptId().getTaskID() + "</a></td>");
     out.print("<td>" + StringUtils.getFormattedTimeWithDiff(dateFormat, 
           attempt.getStartTime(), 0 ) + "</td>");

Modified: hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp Mon Mar  8 09:43:18 2010
@@ -39,17 +39,21 @@
 %>
 
 <%	
-  String jobid = JobID.forName(request.getParameter("jobid")).toString();
   String logFile = request.getParameter("logFile");
-  String taskid = request.getParameter("taskid"); 
+  String tipid = request.getParameter("tipid"); 
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
-  JobHistoryParser.TaskInfo task = job.getAllTasks().get(TaskID.forName(taskid)); 
+  JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+  JobHistoryParser.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+      response, jobTracker, fs, new Path(logFile));
+  if (job == null) {
+    return;
+  }
+  JobHistoryParser.TaskInfo task = job.getAllTasks().get(TaskID.forName(tipid)); 
   TaskType type = task.getTaskType();
 %>
 <html>
 <body>
-<h2><%=taskid %> attempts for <a href="jobdetailshistory.jsp?jobid=<%=jobid%>&&logFile=<%=logFile%>"> <%=jobid %> </a></h2>
+<h2><%=tipid %> attempts for <a href="jobdetailshistory.jsp?logFile=<%=logFile%>"> <%=job.getJobId() %> </a></h2>
 <center>
 <table border="2" cellpadding="5" cellspacing="2">
 <tr><td>Task Id</td><td>Start Time</td>
@@ -126,12 +130,8 @@
     Counters counters = taskAttempt.getCounters();
     if (counters != null) {
       TaskAttemptID attemptId = taskAttempt.getAttemptId();
-      TaskID taskId = attemptId.getTaskID();
-      org.apache.hadoop.mapreduce.JobID jobId = taskId.getJobID();
       out.print("<td>" 
-       + "<a href=\"/taskstatshistory.jsp?jobid=" + jobId
-           + "&taskid=" + taskId
-           + "&attemptid=" + attemptId
+       + "<a href=\"/taskstatshistory.jsp?attemptid=" + attemptId
            + "&logFile=" + logFile + "\">"
            + counters.countCounters() + "</a></td>");
     } else {

Modified: hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp?rev=920250&r1=920249&r2=920250&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp Mon Mar  8 09:43:18 2010
@@ -32,25 +32,33 @@
   import="org.apache.hadoop.mapreduce.TaskAttemptID" 
   import="org.apache.hadoop.mapreduce.Counter" 
   import="org.apache.hadoop.mapreduce.Counters" 
-  import="org.apache.hadoop.mapreduce.CounterGroup" 
+  import="org.apache.hadoop.mapreduce.CounterGroup"
 %>
 <%! private static SimpleDateFormat dateFormat = new SimpleDateFormat("d/MM HH:mm:ss") ;
     private static final long serialVersionUID = 1L;
 %>
 
 <%
-  String jobid = request.getParameter("jobid");
   String attemptid = request.getParameter("attemptid");
-  String taskid = request.getParameter("taskid");
+  if(attemptid == null) {
+    out.println("No attemptid found! Pass a 'attemptid' parameter in the request.");
+    return;
+  }
+  TaskID tipid = TaskAttemptID.forName(attemptid).getTaskID();
   String logFile = request.getParameter("logFile");
 
   Format decimal = new DecimalFormat();
 
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
+  JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
+  JobHistoryParser.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+      response, jobTracker, fs, new Path(logFile));
+  if (job == null) {
+    return;
+  }
 
   Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
-  JobHistoryParser.TaskInfo task = tasks.get(TaskID.forName(taskid));
+  JobHistoryParser.TaskInfo task = tasks.get(tipid);
 
   Map<TaskAttemptID, JobHistoryParser.TaskAttemptInfo> attempts = task.getAllTaskAttempts();
   JobHistoryParser.TaskAttemptInfo attempt = attempts.get(TaskAttemptID.forName(attemptid));
@@ -106,7 +114,7 @@
 %>
 
 <hr>
-<a href="jobdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>">Go back to the job</a><br>
+<a href="jobdetailshistory.jsp?logFile=<%=logFile%>">Go back to the job</a><br>
 <a href="jobtracker.jsp">Go back to JobTracker</a><br>
 <%
 out.println(ServletUtil.htmlFooter());



Mime
View raw message