hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ste...@apache.org
Subject svn commit: r885145 [33/34] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ .eclipse.templates/.launches/ conf/ ivy/ lib/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/sr...
Date Sat, 28 Nov 2009 20:26:22 GMT
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/tools/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/tools/TestCopyFiles.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/tools/TestCopyFiles.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/tools/TestCopyFiles.java Sat Nov 28 20:26:01 2009
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -495,6 +496,16 @@
                         "file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
       assertTrue("Source and destination directories do not match.",
           checkFiles(fs, TEST_ROOT_DIR+"/dest2", files));     
+      
+      // single file update should skip copy if destination has the file already
+      String[] args = {"-update", "file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
+          "file:///"+TEST_ROOT_DIR+"/dest2/"+fname};
+      Configuration conf = new Configuration();
+      JobConf job = new JobConf(conf, DistCp.class);
+      DistCp.Arguments distcpArgs = DistCp.Arguments.valueOf(args, conf);
+      assertFalse("Single file update failed to skip copying even though the " 
+          + "file exists at destination.", DistCp.setup(conf, job, distcpArgs));
+      
       //copy single file to existing dir
       deldir(fs, TEST_ROOT_DIR+"/dest2");
       fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
@@ -691,9 +702,9 @@
                         namenode+"/destdat"});
 
       System.out.println(execCmd(shell, "-lsr", logdir));
-      logs = fs.listStatus(new Path(namenode+"/logs"));
+      logs = fs.globStatus(new Path(namenode+"/logs/part*"));
       assertTrue("Unexpected map count, logs.length=" + logs.length,
-          logs.length == 2);
+          logs.length == 1);
     } finally {
       if (dfs != null) { dfs.shutdown(); }
       if (mr != null) { mr.shutdown(); }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/testshell/ExternalMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/testshell/ExternalMapReduce.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/testshell/ExternalMapReduce.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/testshell/ExternalMapReduce.java Sat Nov 28 20:26:01 2009
@@ -66,12 +66,21 @@
       if (classpath.indexOf("testjob.jar") == -1) {
         throw new IOException("failed to find in the library " + classpath);
       }
+      if (classpath.indexOf("test.jar") == -1) {
+        throw new IOException("failed to find the library test.jar in" 
+            + classpath);
+      }
       //fork off ls to see if the file exists.
       // java file.exists() will not work on 
       // cygwin since it is a symlink
-      String[] argv = new String[2];
+      String[] argv = new String[7];
       argv[0] = "ls";
       argv[1] = "files_tmp";
+      argv[2] = "localfilelink";
+      argv[3] = "dfsfilelink";
+      argv[4] = "tarlink";
+      argv[5] = "ziplink";
+      argv[6] = "test.tgz";
       Process p = Runtime.getRuntime().exec(argv);
       int ret = -1;
       try {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/DistCp.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/DistCp.java Sat Nov 28 20:26:01 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -100,6 +101,9 @@
     "\n-filelimit <n>         Limit the total number of files to be <= n" +
     "\n-sizelimit <n>         Limit the total size to be <= n bytes" +
     "\n-delete                Delete the files existing in the dst but not in src" +
+    "\n-dryrun                Display count of files and total size of files" +
+    "\n                        in src and then exit. Copy is not done at all." +
+    "\n                        desturl should not be specified without -update." +
     "\n-mapredSslConf <f>     Filename of SSL configuration for mapper task" +
     
     "\n\nNOTE 1: if -overwrite or -update are set, each source URI is " +
@@ -120,6 +124,7 @@
   private static final long BYTES_PER_MAP =  256 * 1024 * 1024;
   private static final int MAX_MAPS_PER_NODE = 20;
   private static final int SYNC_FILE_MAX = 10;
+  private static final int DEFAULT_FILE_RETRIES = 3;
 
   static enum Counter { COPY, SKIP, FAIL, BYTESCOPIED, BYTESEXPECTED }
   static enum Options {
@@ -193,6 +198,7 @@
   static final String BYTES_PER_MAP_LABEL = NAME + ".bytes.per.map";
   static final String PRESERVE_STATUS_LABEL
       = Options.PRESERVE_STATUS.propertyname + ".value";
+  static final String FILE_RETRIES_LABEL = NAME + ".file.retries";
 
   private JobConf conf;
 
@@ -368,10 +374,98 @@
     }
 
     /**
+     * Validates copy by checking the sizes of files first and then
+     * checksums, if the filesystems support checksums.
+     * @param srcstat src path and metadata
+     * @param absdst dst path
+     * @return true if src & destination files are same
+     */
+    private boolean validateCopy(FileStatus srcstat, Path absdst)
+            throws IOException {
+      if (destFileSys.exists(absdst)) {
+        if (sameFile(srcstat.getPath().getFileSystem(job), srcstat,
+            destFileSys, absdst)) {
+          return true;
+        }
+      }
+      return false;
+    }
+    
+    /**
+     * Increment number of files copied and bytes copied and then report status
+     */
+    void updateCopyStatus(FileStatus srcstat, Reporter reporter) {
+      copycount++;
+      reporter.incrCounter(Counter.BYTESCOPIED, srcstat.getLen());
+      reporter.incrCounter(Counter.COPY, 1);
+      updateStatus(reporter);
+    }
+    
+    /**
+     * Skip copying this file if already exists at the destination.
+     * Updates counters and copy status if skipping this file.
+     * @return true    if copy of this file can be skipped
+     */
+    private boolean skipCopyFile(FileStatus srcstat, Path absdst,
+                            OutputCollector<WritableComparable<?>, Text> outc,
+                            Reporter reporter) throws IOException {
+      if (destFileSys.exists(absdst) && !overwrite
+          && !needsUpdate(srcstat, destFileSys, absdst)) {
+        outc.collect(null, new Text("SKIP: " + srcstat.getPath()));
+        ++skipcount;
+        reporter.incrCounter(Counter.SKIP, 1);
+        updateStatus(reporter);
+        return true;
+      }
+      return false;
+    }
+    
+    /**
+     * Copies single file to the path specified by tmpfile.
+     * @param srcstat  src path and metadata
+     * @param tmpfile  temporary file to which copy is to be done
+     * @param absdst   actual destination path to which copy is to be done
+     * @param reporter
+     * @return Number of bytes copied
+     */
+    private long doCopyFile(FileStatus srcstat, Path tmpfile, Path absdst,
+                            Reporter reporter) throws IOException {
+      FSDataInputStream in = null;
+      FSDataOutputStream out = null;
+      long bytesCopied = 0L;
+      try {
+        Path srcPath = srcstat.getPath();
+        // open src file
+        in = srcPath.getFileSystem(job).open(srcPath);
+        reporter.incrCounter(Counter.BYTESEXPECTED, srcstat.getLen());
+        // open tmp file
+        out = create(tmpfile, reporter, srcstat);
+        LOG.info("Copying file " + srcPath + " of size " +
+                 srcstat.getLen() + " bytes...");
+        
+        // copy file
+        for(int bytesRead; (bytesRead = in.read(buffer)) >= 0; ) {
+          out.write(buffer, 0, bytesRead);
+          bytesCopied += bytesRead;
+          reporter.setStatus(
+              String.format("%.2f ", bytesCopied*100.0/srcstat.getLen())
+              + absdst + " [ " +
+              StringUtils.humanReadableInt(bytesCopied) + " / " +
+              StringUtils.humanReadableInt(srcstat.getLen()) + " ]");
+        }
+      } finally {
+        checkAndClose(in);
+        checkAndClose(out);
+      }
+      return bytesCopied;
+    }
+    
+    /**
      * Copy a file to a destination.
      * @param srcstat src path and metadata
      * @param dstpath dst path
      * @param reporter
+     * @throws IOException if copy fails (even if the validation of copy fails)
      */
     private void copy(FileStatus srcstat, Path relativedst,
         OutputCollector<WritableComparable<?>, Text> outc, Reporter reporter)
@@ -380,6 +474,16 @@
       int totfiles = job.getInt(SRC_COUNT_LABEL, -1);
       assert totfiles >= 0 : "Invalid file count " + totfiles;
 
+      if (totfiles == 1) {
+        // Copying a single file; use dst path provided by user as
+        // destination file rather than destination directory
+        Path dstparent = absdst.getParent();
+        if (!(destFileSys.exists(dstparent) &&
+              destFileSys.getFileStatus(dstparent).isDir())) {
+          absdst = dstparent;
+        }
+      }
+      
       // if a directory, ensure created even if empty
       if (srcstat.isDir()) {
         if (destFileSys.exists(absdst)) {
@@ -397,81 +501,41 @@
         return;
       }
 
-      if (destFileSys.exists(absdst) && !overwrite
-          && !needsUpdate(srcstat, destFileSys, absdst)) {
-        outc.collect(null, new Text("SKIP: " + srcstat.getPath()));
-        ++skipcount;
-        reporter.incrCounter(Counter.SKIP, 1);
-        updateStatus(reporter);
+      // Can we skip copying this file ?
+      if (skipCopyFile(srcstat, absdst, outc, reporter)) {
         return;
       }
 
       Path tmpfile = new Path(job.get(TMP_DIR_LABEL), relativedst);
-      long cbcopied = 0L;
-      FSDataInputStream in = null;
-      FSDataOutputStream out = null;
-      try {
-        // open src file
-        in = srcstat.getPath().getFileSystem(job).open(srcstat.getPath());
-        reporter.incrCounter(Counter.BYTESEXPECTED, srcstat.getLen());
-        // open tmp file
-        out = create(tmpfile, reporter, srcstat);
-        // copy file
-        for(int cbread; (cbread = in.read(buffer)) >= 0; ) {
-          out.write(buffer, 0, cbread);
-          cbcopied += cbread;
-          reporter.setStatus(
-              String.format("%.2f ", cbcopied*100.0/srcstat.getLen())
-              + absdst + " [ " +
-              StringUtils.humanReadableInt(cbcopied) + " / " +
-              StringUtils.humanReadableInt(srcstat.getLen()) + " ]");
-        }
-      } finally {
-        checkAndClose(in);
-        checkAndClose(out);
-      }
+      // do the actual copy to tmpfile
+      long bytesCopied = doCopyFile(srcstat, tmpfile, absdst, reporter);
 
-      if (cbcopied != srcstat.getLen()) {
+      if (bytesCopied != srcstat.getLen()) {
         throw new IOException("File size not matched: copied "
-            + bytesString(cbcopied) + " to tmpfile (=" + tmpfile
+            + bytesString(bytesCopied) + " to tmpfile (=" + tmpfile
             + ") but expected " + bytesString(srcstat.getLen()) 
             + " from " + srcstat.getPath());        
       }
       else {
-        if (totfiles == 1) {
-          // Copying a single file; use dst path provided by user as destination
-          // rather than destination directory, if a file
-          Path dstparent = absdst.getParent();
-          if (!(destFileSys.exists(dstparent) &&
-                destFileSys.getFileStatus(dstparent).isDir())) {
-            absdst = dstparent;
-          }
-        }
         if (destFileSys.exists(absdst) &&
             destFileSys.getFileStatus(absdst).isDir()) {
           throw new IOException(absdst + " is a directory");
         }
         if (!destFileSys.mkdirs(absdst.getParent())) {
-          throw new IOException("Failed to craete parent dir: " + absdst.getParent());
+          throw new IOException("Failed to create parent dir: " + absdst.getParent());
         }
         rename(tmpfile, absdst);
 
-        FileStatus dststat = destFileSys.getFileStatus(absdst);
-        if (dststat.getLen() != srcstat.getLen()) {
+        if (!validateCopy(srcstat, absdst)) {
           destFileSys.delete(absdst, false);
-          throw new IOException("File size not matched: copied "
-              + bytesString(dststat.getLen()) + " to dst (=" + absdst
-              + ") but expected " + bytesString(srcstat.getLen()) 
-              + " from " + srcstat.getPath());        
+          throw new IOException("Validation of copy of file "
+              + srcstat.getPath() + " failed.");
         } 
-        updateDestStatus(srcstat, dststat);
+        updateDestStatus(srcstat, destFileSys.getFileStatus(absdst));
       }
 
       // report at least once for each file
-      ++copycount;
-      reporter.incrCounter(Counter.BYTESCOPIED, cbcopied);
-      reporter.incrCounter(Counter.COPY, 1);
-      updateStatus(reporter);
+      updateCopyStatus(srcstat, reporter);
     }
     
     /** rename tmp to dst, delete dst if already exists */
@@ -501,6 +565,41 @@
       return b + " bytes (" + StringUtils.humanReadableInt(b) + ")";
     }
 
+    /**
+     * Copies a file and validates the copy by checking the checksums.
+     * If validation fails, retries (max number of tries is distcp.file.retries)
+     * to copy the file.
+     */
+    void copyWithRetries(FileStatus srcstat, Path relativedst,
+                         OutputCollector<WritableComparable<?>, Text> out,
+                         Reporter reporter) throws IOException {
+
+      // max tries to copy when validation of copy fails
+      final int maxRetries = job.getInt(FILE_RETRIES_LABEL, DEFAULT_FILE_RETRIES);
+      // save update flag for later copies within the same map task
+      final boolean saveUpdate = update;
+      
+      int retryCnt = 1;
+      for (; retryCnt <= maxRetries; retryCnt++) {
+        try {
+          //copy the file and validate copy
+          copy(srcstat, relativedst, out, reporter);
+          break;// copy successful
+        } catch (IOException e) {
+          LOG.warn("Copy of " + srcstat.getPath() + " failed.", e);
+          if (retryCnt < maxRetries) {// copy failed and need to retry
+            LOG.info("Retrying copy of file " + srcstat.getPath());
+            update = true; // set update flag for retries
+          }
+          else {// no more retries... Give up
+            update = saveUpdate;
+            throw new IOException("Copy of file failed even with " + retryCnt
+                                  + " tries.", e);
+          }
+        }
+      }
+    }
+    
     /** Mapper configuration.
      * Extracts source and destination file system, as well as
      * top-level paths on source and destination directories.
@@ -539,7 +638,7 @@
       final FileStatus srcstat = value.input;
       final Path relativedst = new Path(value.output);
       try {
-        copy(srcstat, relativedst, out, reporter);
+        copyWithRetries(srcstat, relativedst, out, reporter);
       } catch (IOException e) {
         ++failcount;
         reporter.incrCounter(Counter.FAIL, 1);
@@ -622,7 +721,7 @@
 
     final Path dst = new Path(destPath);
     copy(conf, new Arguments(tmp, null, dst, logPath, flags, null,
-        Long.MAX_VALUE, Long.MAX_VALUE, null));
+        Long.MAX_VALUE, Long.MAX_VALUE, null, false));
   }
 
   /** Sanity check for srcPath */
@@ -656,7 +755,9 @@
   static void copy(final Configuration conf, final Arguments args
       ) throws IOException {
     LOG.info("srcPaths=" + args.srcs);
-    LOG.info("destPath=" + args.dst);
+    if (!args.dryrun || args.flags.contains(Options.UPDATE)) {
+      LOG.info("destPath=" + args.dst);
+    }
     checkSrcPath(conf, args.srcs);
 
     JobConf job = createJobConf(conf);
@@ -672,10 +773,14 @@
       if (setup(conf, job, args)) {
         JobClient.runJob(job);
       }
-      finalize(conf, job, args.dst, args.preservedAttributes);
+      if(!args.dryrun) {
+        finalize(conf, job, args.dst, args.preservedAttributes);
+      }
     } finally {
-      //delete tmp
-      fullyDelete(job.get(TMP_DIR_LABEL), job);
+      if (!args.dryrun) {
+        //delete tmp
+        fullyDelete(job.get(TMP_DIR_LABEL), job);
+      }
       //delete jobDirectory
       fullyDelete(job.get(JOB_DIR_LABEL), job);
     }
@@ -736,7 +841,7 @@
     }
   }
 
-  static private class Arguments {
+  static class Arguments {
     final List<Path> srcs;
     final Path basedir;
     final Path dst;
@@ -746,6 +851,7 @@
     final long filelimit;
     final long sizelimit;
     final String mapredSslConf;
+    final boolean dryrun;
     
     /**
      * Arguments for distcp
@@ -760,7 +866,8 @@
      */
     Arguments(List<Path> srcs, Path basedir, Path dst, Path log,
         EnumSet<Options> flags, String preservedAttributes,
-        long filelimit, long sizelimit, String mapredSslConf) {
+        long filelimit, long sizelimit, String mapredSslConf,
+        boolean dryrun) {
       this.srcs = srcs;
       this.basedir = basedir;
       this.dst = dst;
@@ -770,6 +877,7 @@
       this.filelimit = filelimit;
       this.sizelimit = sizelimit;
       this.mapredSslConf = mapredSslConf;
+      this.dryrun = dryrun;
       
       if (LOG.isTraceEnabled()) {
         LOG.trace("this = " + this);
@@ -787,6 +895,7 @@
       String mapredSslConf = null;
       long filelimit = Long.MAX_VALUE;
       long sizelimit = Long.MAX_VALUE;
+      boolean dryrun = false;
 
       for (int idx = 0; idx < args.length; idx++) {
         Options[] opt = Options.values();
@@ -825,6 +934,9 @@
             throw new IllegalArgumentException("ssl conf file not specified in -mapredSslConf");
           }
           mapredSslConf = args[idx];
+        } else if ("-dryrun".equals(args[idx])) {
+          dryrun = true;
+          dst = new Path("/tmp/distcp_dummy_dest");//dummy destination
         } else if ("-m".equals(args[idx])) {
           if (++idx == args.length) {
             throw new IllegalArgumentException("num_maps not specified in -m");
@@ -837,7 +949,8 @@
           }
         } else if ('-' == args[idx].codePointAt(0)) {
           throw new IllegalArgumentException("Invalid switch " + args[idx]);
-        } else if (idx == args.length -1) {
+        } else if (idx == args.length -1 &&
+                   (!dryrun || flags.contains(Options.UPDATE))) {
           dst = new Path(args[idx]);
         } else {
           srcs.add(new Path(args[idx]));
@@ -861,7 +974,7 @@
             + Options.UPDATE + ".");
       }
       return new Arguments(srcs, basedir, dst, log, flags, presevedAttributes,
-          filelimit, sizelimit, mapredSslConf);
+          filelimit, sizelimit, mapredSslConf, dryrun);
     }
     
     /** {@inheritDoc} */
@@ -973,14 +1086,17 @@
   static void fullyDelete(String dir, Configuration conf) throws IOException {
     if (dir != null) {
       Path tmp = new Path(dir);
-      tmp.getFileSystem(conf).delete(tmp, true);
+      boolean success = tmp.getFileSystem(conf).delete(tmp, true);
+      if (!success) {
+        LOG.warn("Could not fully delete " + tmp);
+      }
     }
   }
 
   //Job configuration
   private static JobConf createJobConf(Configuration conf) {
     JobConf jobconf = new JobConf(conf, DistCp.class);
-    jobconf.setJobName(NAME);
+    jobconf.setJobName(conf.get("mapred.job.name", NAME));
 
     // turn off speculative execution, because DFS doesn't handle
     // multiple writers to the same file.
@@ -1023,20 +1139,40 @@
   }
   
   /**
+   * Does the dir already exist at destination ?
+   * @return true   if the dir already exists at destination
+   */
+  private static boolean dirExists(Configuration conf, Path dst)
+                 throws IOException {
+    FileSystem destFileSys = dst.getFileSystem(conf);
+    FileStatus status = null;
+    try {
+      status = destFileSys.getFileStatus(dst);
+    }catch (FileNotFoundException e) {
+      return false;
+    }
+    if (!status.isDir()) {
+      throw new FileAlreadyExistsException("Not a dir: " + dst+" is a file.");
+    }
+    return true;
+  }
+  
+  /**
    * Initialize DFSCopyFileMapper specific job-configuration.
    * @param conf : The dfs/mapred configuration.
    * @param jobConf : The handle to the jobConf object to be initialized.
    * @param args Arguments
    * @return true if it is necessary to launch a job.
    */
-  private static boolean setup(Configuration conf, JobConf jobConf,
+  static boolean setup(Configuration conf, JobConf jobConf,
                             final Arguments args)
       throws IOException {
     jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
 
     //set boolean values
     final boolean update = args.flags.contains(Options.UPDATE);
-    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
+    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE)
+                              && !args.dryrun;
     jobConf.setBoolean(Options.UPDATE.propertyname, update);
     jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
     jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
@@ -1062,6 +1198,12 @@
       String filename = "_distcp_logs_" + randomId;
       if (!dstExists || !dstIsDir) {
         Path parent = args.dst.getParent();
+        if (null == parent) {
+          // If dst is '/' on S3, it might not exist yet, but dst.getParent()
+          // will return null. In this case, use '/' as its own parent to prevent
+          // NPE errors below.
+          parent = args.dst;
+        }
         if (!dstfs.exists(parent)) {
           dstfs.mkdirs(parent);
         }
@@ -1098,7 +1240,8 @@
     final boolean special =
       (args.srcs.size() == 1 && !dstExists) || update || overwrite;
     int srcCount = 0, cnsyncf = 0, dirsyn = 0;
-    long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L;
+    long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L,
+         skipFileCount = 0L, skipByteCount = 0L;
     
     Path basedir = null;
     HashSet<Path> parentDirsToCopy = new HashSet<Path>(); 
@@ -1116,7 +1259,14 @@
         FileSystem srcfs = src.getFileSystem(conf);
         FileStatus srcfilestat = srcfs.getFileStatus(src);
         Path root = special && srcfilestat.isDir()? src: src.getParent();
-    
+        if (dstExists && !dstIsDir &&
+            (args.srcs.size() > 1 || srcfilestat.isDir())) {
+          // destination should not be a file
+          throw new IOException("Destination " + args.dst + " should be a dir" +
+                                " if multiple source paths are there OR if" +
+                                " the source path is a dir");
+        }
+
         if (basedir != null) {
           root = basedir;
           Path parent = src.getParent().makeQualified(srcfs);
@@ -1144,9 +1294,12 @@
         
         if (srcfilestat.isDir()) {
           ++srcCount;
-          ++dirCount;
           final String dst = makeRelative(root,src);
-          src_writer.append(new LongWritable(0), new FilePair(srcfilestat, dst));
+          if (!update || !dirExists(conf, new Path(args.dst, dst))) {
+            ++dirCount;
+            src_writer.append(new LongWritable(0),
+                              new FilePair(srcfilestat, dst));
+          }
           dst_writer.append(new Text(dst), new Text(src.toString()));
         }
 
@@ -1155,23 +1308,39 @@
           FileStatus cur = pathstack.pop();
           FileStatus[] children = srcfs.listStatus(cur.getPath());
           for(int i = 0; i < children.length; i++) {
-            boolean skipfile = false;
+            boolean skipPath = false;
             final FileStatus child = children[i]; 
             final String dst = makeRelative(root, child.getPath());
             ++srcCount;
 
             if (child.isDir()) {
               pathstack.push(child);
-              ++dirCount;
+              if (!update || !dirExists(conf, new Path(args.dst, dst))) {
+                ++dirCount;
+              }
+              else {
+                skipPath = true; // skip creating dir at destination
+              }
             }
             else {
-              //skip file if the src and the dst files are the same.
-              skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
-              //skip file if it exceed file limit or size limit
-              skipfile |= fileCount == args.filelimit
+              Path destPath = new Path(args.dst, dst);
+              if (!cur.isDir() && (args.srcs.size() == 1)) {
+                // Copying a single file; use dst path provided by user as
+                // destination file rather than destination directory
+                Path dstparent = destPath.getParent();
+                FileSystem destFileSys = destPath.getFileSystem(jobConf);
+                if (!(destFileSys.exists(dstparent) &&
+                    destFileSys.getFileStatus(dstparent).isDir())) {
+                  destPath = dstparent;
+                }
+              }
+              //skip path if the src and the dst files are the same.
+              skipPath = update && sameFile(srcfs, child, dstfs, destPath);
+              //skip path if it exceed file limit or size limit
+              skipPath |= fileCount == args.filelimit
                           || byteCount + child.getLen() > args.sizelimit; 
 
-              if (!skipfile) {
+              if (!skipPath) {
                 ++fileCount;
                 byteCount += child.getLen();
 
@@ -1188,9 +1357,16 @@
                   cbsyncs = 0L;
                 }
               }
+              else {
+                ++skipFileCount;
+                skipByteCount += child.getLen();
+                if (LOG.isTraceEnabled()) {
+                  LOG.trace("skipping file " + child.getPath());
+                }
+              }
             }
 
-            if (!skipfile) {
+            if (!skipPath) {
               src_writer.append(new LongWritable(child.isDir()? 0: child.getLen()),
                   new FilePair(child, dst));
             }
@@ -1214,7 +1390,17 @@
       checkAndClose(dst_writer);
       checkAndClose(dir_writer);
     }
-
+    LOG.info("sourcePathsCount(files+directories)=" + srcCount);
+    LOG.info("filesToCopyCount=" + fileCount);
+    LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
+    if (update) {
+      LOG.info("filesToSkipCopyCount=" + skipFileCount);
+      LOG.info("bytesToSkipCopyCount=" +
+               StringUtils.humanReadableInt(skipByteCount));
+    }
+    if (args.dryrun) {
+      return false;
+    }
     int mapCount = setMapCount(byteCount, jobConf);
     // Increase the replication of _distcp_src_files, if needed
     setReplication(conf, jobConf, srcfilelist, mapCount);
@@ -1237,14 +1423,21 @@
     checkDuplication(jobfs, dstfilelist, sorted, conf);
 
     if (dststatus != null && args.flags.contains(Options.DELETE)) {
-      deleteNonexisting(dstfs, dststatus, sorted,
+      long deletedPathsCount = deleteNonexisting(dstfs, dststatus, sorted,
           jobfs, jobDirectory, jobConf, conf);
+      LOG.info("deletedPathsFromDestCount(files+directories)=" +
+               deletedPathsCount);
     }
 
     Path tmpDir = new Path(
         (dstExists && !dstIsDir) || (!dstExists && srcCount == 1)?
         args.dst.getParent(): args.dst, "_distcp_tmp_" + randomId);
     jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
+
+    // Explicitly create the tmpDir to ensure that it can be cleaned
+    // up by fullyDelete() later.
+    tmpDir.getFileSystem(conf).mkdirs(tmpDir);
+
     LOG.info("sourcePathsCount=" + srcCount);
     LOG.info("filesToCopyCount=" + fileCount);
     LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
@@ -1308,8 +1501,13 @@
     }
   }
   
-  /** Delete the dst files/dirs which do not exist in src */
-  static private void deleteNonexisting(
+  /**
+   * Delete the dst files/dirs which do not exist in src
+   * 
+   * @return total count of files and directories deleted from destination
+   * @throws IOException
+   */
+  static private long deleteNonexisting(
       FileSystem dstfs, FileStatus dstroot, Path dstsorted,
       FileSystem jobfs, Path jobdir, JobConf jobconf, Configuration conf
       ) throws IOException {
@@ -1350,6 +1548,7 @@
     //compare lsr list and dst list  
     SequenceFile.Reader lsrin = null;
     SequenceFile.Reader dstin = null;
+    long deletedPathsCount = 0;
     try {
       lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
       dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);
@@ -1377,6 +1576,7 @@
         else {
           //lsrpath does not exist, delete it
           String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
+          ++deletedPathsCount;
           if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
             shellargs[1] = s;
             int r = 0;
@@ -1396,6 +1596,7 @@
       checkAndClose(lsrin);
       checkAndClose(dstin);
     }
+    return deletedPathsCount;
   }
 
   //is x an ancestor path of y?

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/HadoopArchives.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/HadoopArchives.java Sat Nov 28 20:26:01 2009
@@ -56,6 +56,7 @@
 import org.apache.hadoop.mapred.SequenceFileRecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -424,7 +425,7 @@
     conf.setReducerClass(HArchivesReducer.class);
     conf.setMapOutputKeyClass(IntWritable.class);
     conf.setMapOutputValueClass(Text.class);
-    conf.set("hadoop.job.history.user.location", "none");
+    conf.set(JobContext.HISTORY_LOCATION, "none");
     FileInputFormat.addInputPath(conf, jobDirectory);
     //make sure no speculative execution is done
     conf.setSpeculativeExecution(false);
@@ -459,7 +460,7 @@
       // this is tightly tied to map reduce
       // since it does not expose an api 
       // to get the partition
-      partId = conf.getInt("mapred.task.partition", -1);
+      partId = conf.getInt(JobContext.TASK_PARTITION, -1);
       // create a file name using the partition
       // we need to write to this directory
       tmpOutputDir = FileOutputFormat.getWorkOutputPath(conf);
@@ -744,11 +745,16 @@
   public static void main(String[] args) {
     JobConf job = new JobConf(HadoopArchives.class);
     HadoopArchives harchives = new HadoopArchives(job);
-    try {
-      int res = harchives.run(args);
-      System.exit(res);
+    int ret = 0;
+
+    try{
+      ret = ToolRunner.run(harchives, args);
     } catch(Exception e) {
+      LOG.debug("Exception in archives  ", e);
+      System.err.println("Exception in archives");
       System.err.println(e.getLocalizedMessage());
+      System.exit(1);
     }
+    System.exit(ret);
   }
 }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/Logalyzer.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/Logalyzer.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/tools/org/apache/hadoop/tools/Logalyzer.java Sat Nov 28 20:26:01 2009
@@ -46,6 +46,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
+import org.apache.hadoop.mapreduce.lib.map.RegexMapper;
 
 /**
  * Logalyzer: A utility tool for archiving and analyzing hadoop logs.
@@ -64,7 +65,17 @@
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
+  public static String SORT_COLUMNS = 
+    "logalizer.logcomparator.sort.columns";
+  public static String COLUMN_SEPARATOR = 
+    "logalizer.logcomparator.column.separator";
   
+  static {
+    Configuration.addDeprecation("mapred.reducer.sort", 
+      new String[] {SORT_COLUMNS});
+    Configuration.addDeprecation("mapred.reducer.separator", 
+      new String[] {COLUMN_SEPARATOR});
+  }
   /** A {@link Mapper} that extracts text matching a regular expression. */
   public static class LogRegexMapper<K extends WritableComparable>
     extends MapReduceBase
@@ -73,7 +84,7 @@
     private Pattern pattern;
     
     public void configure(JobConf job) {
-      pattern = Pattern.compile(job.get("mapred.mapper.regex"));
+      pattern = Pattern.compile(job.get(RegexMapper.PATTERN));
     }
     
     public void map(K key, Text value,
@@ -105,13 +116,13 @@
       }
       
       //Initialize the specification for *comparision*
-      String sortColumns = this.conf.get("mapred.reducer.sort", null);
+      String sortColumns = this.conf.get(SORT_COLUMNS, null);
       if (sortColumns != null) {
         sortSpec = sortColumns.split(",");
       }
       
       //Column-separator
-      columnSeparator = this.conf.get("mapred.reducer.separator", "");
+      columnSeparator = this.conf.get(COLUMN_SEPARATOR, "");
     }
     
     public Configuration getConf() {
@@ -217,9 +228,9 @@
     grepJob.setInputFormat(TextInputFormat.class);
     
     grepJob.setMapperClass(LogRegexMapper.class);
-    grepJob.set("mapred.mapper.regex", grepPattern);
-    grepJob.set("mapred.reducer.sort", sortColumns);
-    grepJob.set("mapred.reducer.separator", columnSeparator);
+    grepJob.set(RegexMapper.PATTERN, grepPattern);
+    grepJob.set(SORT_COLUMNS, sortColumns);
+    grepJob.set(COLUMN_SEPARATOR, columnSeparator);
     
     grepJob.setCombinerClass(LongSumReducer.class);
     grepJob.setReducerClass(LongSumReducer.class);

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
 /hadoop/core/trunk/src/webapps/job:776175-785643
-/hadoop/mapreduce/trunk/src/webapps/job:804974-807678
+/hadoop/mapreduce/trunk/src/webapps/job:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/analysejobhistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/analysejobhistory.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/analysejobhistory.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/analysejobhistory.jsp Sat Nov 28 20:26:01 2009
@@ -26,7 +26,7 @@
   import="org.apache.hadoop.fs.*"
   import="org.apache.hadoop.util.*"
   import="java.text.SimpleDateFormat"
-  import="org.apache.hadoop.mapred.JobHistory.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
 
 <%!	private static SimpleDateFormat dateFormat 
@@ -38,93 +38,96 @@
 <%
   String jobid = request.getParameter("jobid");
   String logFile = request.getParameter("logFile");
-  String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
   String numTasks = request.getParameter("numTasks");
   int showTasks = 10 ; 
   if (numTasks != null) {
     showTasks = Integer.parseInt(numTasks);  
   }
   FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-  JobInfo job = JSPUtil.getJobInfo(request, fs);
+  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
 %>
-<h2>Hadoop Job <a href="jobdetailshistory.jsp?jobid=<%=jobid%>&&logFile=<%=encodedLogFileName%>"><%=jobid %> </a></h2>
-<b>User : </b> <%=job.get(Keys.USER) %><br/> 
-<b>JobName : </b> <%=job.get(Keys.JOBNAME) %><br/> 
-<b>JobConf : </b> <%=job.get(Keys.JOBCONF) %><br/> 
-<b>Submitted At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 ) %><br/> 
-<b>Launched At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %><br/>
-<b>Finished At : </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %><br/>
-<b>Status : </b> <%= ((job.get(Keys.JOB_STATUS) == null)?"Incomplete" :job.get(Keys.JOB_STATUS)) %><br/> 
+<h2>Hadoop Job <a href="jobdetailshistory.jsp?jobid=<%=jobid%>&&logFile=<%=logFile%>"><%=jobid %> </a></h2>
+<b>User : </b> <%=job.getUsername() %><br/> 
+<b>JobName : </b> <%=job.getJobname() %><br/> 
+<b>JobConf : </b> <%=job.getJobConfPath() %><br/> 
+<b>Submitted At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getSubmitTime(), 0 ) %><br/> 
+<b>Launched At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLaunchTime(), job.getSubmitTime()) %><br/>
+<b>Finished At : </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getFinishTime(), job.getLaunchTime()) %><br/>
+<b>Status : </b> <%= ((job.getJobStatus() == null)?"Incomplete" :job.getJobStatus()) %><br/> 
 <hr/>
 <center>
 <%
-  if (!Values.SUCCESS.name().equals(job.get(Keys.JOB_STATUS))) {
+  if (!JobStatus.getJobRunState(JobStatus.SUCCEEDED).equals(job.getJobStatus())) {
     out.print("<h3>No Analysis available as job did not finish</h3>");
     return;
   }
-  Map<String, JobHistory.Task> tasks = job.getAllTasks();
-  int finishedMaps = job.getInt(Keys.FINISHED_MAPS)  ;
-  int finishedReduces = job.getInt(Keys.FINISHED_REDUCES) ;
-  JobHistory.Task [] mapTasks = new JobHistory.Task[finishedMaps]; 
-  JobHistory.Task [] reduceTasks = new JobHistory.Task[finishedReduces]; 
-  int mapIndex = 0 , reduceIndex=0; 
-  long avgMapTime = 0;
-  long avgReduceTime = 0;
-  long avgShuffleTime = 0;
-
-  for (JobHistory.Task task : tasks.values()) {
-    Map<String, TaskAttempt> attempts = task.getTaskAttempts();
-    for (JobHistory.TaskAttempt attempt : attempts.values()) {
-      if (attempt.get(Keys.TASK_STATUS).equals(Values.SUCCESS.name())) {
-        long avgFinishTime = (attempt.getLong(Keys.FINISH_TIME) -
-      		                attempt.getLong(Keys.START_TIME));
-        if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))) {
-          mapTasks[mapIndex++] = attempt ; 
-          avgMapTime += avgFinishTime;
-        } else if (Values.REDUCE.name().equals(task.get(Keys.TASK_TYPE))) { 
-          reduceTasks[reduceIndex++] = attempt;
-          avgShuffleTime += (attempt.getLong(Keys.SHUFFLE_FINISHED) - 
-                             attempt.getLong(Keys.START_TIME));
-          avgReduceTime += (attempt.getLong(Keys.FINISH_TIME) -
-                            attempt.getLong(Keys.SHUFFLE_FINISHED));
-        }
-        break;
-      }
+  
+  HistoryViewer.AnalyzedJob avg = new HistoryViewer.AnalyzedJob(job);
+  JobHistoryParser.TaskAttemptInfo [] mapTasks = avg.getMapTasks();
+  JobHistoryParser.TaskAttemptInfo [] reduceTasks = avg.getReduceTasks();
+
+  Comparator<JobHistoryParser.TaskAttemptInfo> cMap = 
+    new Comparator<JobHistoryParser.TaskAttemptInfo>() {
+    public int compare(JobHistoryParser.TaskAttemptInfo t1, 
+        JobHistoryParser.TaskAttemptInfo t2) {
+      long l1 = t1.getFinishTime() - t1.getStartTime();
+      long l2 = t2.getFinishTime() - t2.getStartTime();
+      return (l2 < l1 ? -1 : (l2 == l1 ? 0 : 1));
     }
-  }
-	 
-  if (finishedMaps > 0) {
-    avgMapTime /= finishedMaps;
-  }
-  if (finishedReduces > 0) {
-    avgReduceTime /= finishedReduces;
-    avgShuffleTime /= finishedReduces;
-  }
-  Comparator<JobHistory.Task> cMap = new Comparator<JobHistory.Task>(){
-    public int compare(JobHistory.Task t1, JobHistory.Task t2){
-      long l1 = t1.getLong(Keys.FINISH_TIME) - t1.getLong(Keys.START_TIME); 
-      long l2 = t2.getLong(Keys.FINISH_TIME) - t2.getLong(Keys.START_TIME);
-      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+  };
+
+  Comparator<JobHistoryParser.TaskAttemptInfo> cShuffle = 
+    new Comparator<JobHistoryParser.TaskAttemptInfo>() {
+    public int compare(JobHistoryParser.TaskAttemptInfo t1, 
+        JobHistoryParser.TaskAttemptInfo t2) {
+      long l1 = t1.getShuffleFinishTime() - t1.getStartTime();
+      long l2 = t2.getShuffleFinishTime() - t2.getStartTime();
+      return (l2 < l1 ? -1 : (l2 == l1 ? 0 : 1));
     }
-  }; 
-  Comparator<JobHistory.Task> cShuffle = new Comparator<JobHistory.Task>(){
-    public int compare(JobHistory.Task t1, JobHistory.Task t2){
-      long l1 = t1.getLong(Keys.SHUFFLE_FINISHED) - 
-                t1.getLong(Keys.START_TIME); 
-      long l2 = t2.getLong(Keys.SHUFFLE_FINISHED) - 
-                t2.getLong(Keys.START_TIME); 
-      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+  };
+
+  Comparator<JobHistoryParser.TaskAttemptInfo> cFinishShuffle = 
+    new Comparator<JobHistoryParser.TaskAttemptInfo>() {
+    public int compare(JobHistoryParser.TaskAttemptInfo t1, 
+        JobHistoryParser.TaskAttemptInfo t2) {
+      long l1 = t1.getShuffleFinishTime(); 
+      long l2 = t2.getShuffleFinishTime();
+      return (l2 < l1 ? -1 : (l2 == l1 ? 0 : 1));
+    }
+  };
+
+  Comparator<JobHistoryParser.TaskAttemptInfo> cFinishMapRed = 
+    new Comparator<JobHistoryParser.TaskAttemptInfo>() {
+    public int compare(JobHistoryParser.TaskAttemptInfo t1, 
+        JobHistoryParser.TaskAttemptInfo t2) {
+      long l1 = t1.getFinishTime(); 
+      long l2 = t2.getFinishTime();
+      return (l2 < l1 ? -1 : (l2 == l1 ? 0 : 1));
+    }
+  };
+  
+  Comparator<JobHistoryParser.TaskAttemptInfo> cReduce = 
+    new Comparator<JobHistoryParser.TaskAttemptInfo>() {
+    public int compare(JobHistoryParser.TaskAttemptInfo t1, 
+        JobHistoryParser.TaskAttemptInfo t2) {
+      long l1 = t1.getFinishTime() -
+                t1.getShuffleFinishTime();
+      long l2 = t2.getFinishTime() -
+                t2.getShuffleFinishTime();
+      return (l2 < l1 ? -1 : (l2 == l1 ? 0 : 1));
     }
   }; 
+
+  if (mapTasks == null || mapTasks.length <= 0) return;
   Arrays.sort(mapTasks, cMap);
-  JobHistory.Task minMap = mapTasks[mapTasks.length-1] ;
+  JobHistoryParser.TaskAttemptInfo minMap = mapTasks[mapTasks.length-1] ;
 %>
 
 <h3>Time taken by best performing Map task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=minMap.get(Keys.TASKID)%>">
-<%=minMap.get(Keys.TASKID) %></a> : <%=StringUtils.formatTimeDiff(minMap.getLong(Keys.FINISH_TIME), minMap.getLong(Keys.START_TIME) ) %></h3>
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=minMap.getAttemptId().getTaskID()%>">
+<%=minMap.getAttemptId().getTaskID() %></a> : <%=StringUtils.formatTimeDiff(minMap.getFinishTime(), minMap.getStartTime() ) %></h3>
 <h3>Average time taken by Map tasks: 
-<%=StringUtils.formatTimeDiff(avgMapTime, 0) %></h3>
+<%=StringUtils.formatTimeDiff(avg.getAvgMapTime(), 0) %></h3>
 <h3>Worse performing map tasks</h3>
 <table border="2" cellpadding="5" cellspacing="2">
 <tr><td>Task Id</td><td>Time taken</td></tr>
@@ -132,48 +135,40 @@
   for (int i=0;i<showTasks && i<mapTasks.length; i++) {
 %>
     <tr>
-    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=mapTasks[i].get(Keys.TASKID)%>">
-        <%=mapTasks[i].get(Keys.TASKID) %></a></td>
-    <td><%=StringUtils.formatTimeDiff(mapTasks[i].getLong(Keys.FINISH_TIME), mapTasks[i].getLong(Keys.START_TIME)) %></td>
+    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=mapTasks[i].getAttemptId().getTaskID()%>">
+        <%=mapTasks[i].getAttemptId().getTaskID() %></a></td>
+    <td><%=StringUtils.formatTimeDiff(mapTasks[i].getFinishTime(), mapTasks[i].getStartTime()) %></td>
     </tr>
 <%
   }
 %>
 </table>
 <%  
-  Comparator<JobHistory.Task> cFinishMapRed = 
-    new Comparator<JobHistory.Task>() {
-    public int compare(JobHistory.Task t1, JobHistory.Task t2){
-      long l1 = t1.getLong(Keys.FINISH_TIME); 
-      long l2 = t2.getLong(Keys.FINISH_TIME);
-      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
-    }
-  };
   Arrays.sort(mapTasks, cFinishMapRed);
-  JobHistory.Task lastMap = mapTasks[0] ;
+  JobHistoryParser.TaskAttemptInfo lastMap = mapTasks[0] ;
 %>
 
 <h3>The last Map task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>
-&taskid=<%=lastMap.get(Keys.TASKID)%>"><%=lastMap.get(Keys.TASKID) %></a> 
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
+&taskid=<%=lastMap.getAttemptId().getTaskID()%>"><%=lastMap.getAttemptId().getTaskID() %></a> 
 finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat, 
-                              lastMap.getLong(Keys.FINISH_TIME), 
-                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+                              lastMap.getFinishTime(), 
+                              job.getLaunchTime()) %></h3>
 <hr/>
 
 <%
   if (reduceTasks.length <= 0) return;
   Arrays.sort(reduceTasks, cShuffle); 
-  JobHistory.Task minShuffle = reduceTasks[reduceTasks.length-1] ;
+  JobHistoryParser.TaskAttemptInfo minShuffle = reduceTasks[reduceTasks.length-1] ;
 %>
 <h3>Time taken by best performing shuffle
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>
-&taskid=<%=minShuffle.get(Keys.TASKID)%>"><%=minShuffle.get(Keys.TASKID)%></a> : 
-<%=StringUtils.formatTimeDiff(minShuffle.getLong(Keys.SHUFFLE_FINISHED), 
-                              minShuffle.getLong(Keys.START_TIME) ) %></h3>
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
+&taskid=<%=minShuffle.getAttemptId().getTaskID()%>"><%=minShuffle.getAttemptId().getTaskID()%></a> : 
+<%=StringUtils.formatTimeDiff(minShuffle.getShuffleFinishTime(),
+                              minShuffle.getStartTime() ) %></h3>
 <h3>Average time taken by Shuffle: 
-<%=StringUtils.formatTimeDiff(avgShuffleTime, 0) %></h3>
+<%=StringUtils.formatTimeDiff(avg.getAvgShuffleTime(), 0) %></h3>
 <h3>Worse performing Shuffle(s)</h3>
 <table border="2" cellpadding="5" cellspacing="2">
 <tr><td>Task Id</td><td>Time taken</td></tr>
@@ -182,12 +177,12 @@
 %>
     <tr>
     <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=
-<%=encodedLogFileName%>&taskid=<%=reduceTasks[i].get(Keys.TASKID)%>">
-<%=reduceTasks[i].get(Keys.TASKID) %></a></td>
+<%=logFile%>&taskid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
+<%=reduceTasks[i].getAttemptId().getTaskID() %></a></td>
     <td><%=
            StringUtils.formatTimeDiff(
-                       reduceTasks[i].getLong(Keys.SHUFFLE_FINISHED),
-                       reduceTasks[i].getLong(Keys.START_TIME)) %>
+                       reduceTasks[i].getShuffleFinishTime(),
+                       reduceTasks[i].getStartTime()) %>
     </td>
     </tr>
 <%
@@ -195,48 +190,31 @@
 %>
 </table>
 <%  
-  Comparator<JobHistory.Task> cFinishShuffle = 
-    new Comparator<JobHistory.Task>() {
-    public int compare(JobHistory.Task t1, JobHistory.Task t2){
-      long l1 = t1.getLong(Keys.SHUFFLE_FINISHED); 
-      long l2 = t2.getLong(Keys.SHUFFLE_FINISHED);
-      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
-    }
-  };
   Arrays.sort(reduceTasks, cFinishShuffle);
-  JobHistory.Task lastShuffle = reduceTasks[0] ;
+  JobHistoryParser.TaskAttemptInfo lastShuffle = reduceTasks[0] ;
 %>
 
 <h3>The last Shuffle  
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>
-&taskid=<%=lastShuffle.get(Keys.TASKID)%>"><%=lastShuffle.get(Keys.TASKID)%>
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
+&taskid=<%=lastShuffle.getAttemptId().getTaskID()%>"><%=lastShuffle.getAttemptId().getTaskID()%>
 </a> finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat,
-                              lastShuffle.getLong(Keys.SHUFFLE_FINISHED), 
-                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+                              lastShuffle.getShuffleFinishTime(),
+                              job.getLaunchTime() ) %></h3>
 
 <%
-  Comparator<JobHistory.Task> cReduce = new Comparator<JobHistory.Task>(){
-    public int compare(JobHistory.Task t1, JobHistory.Task t2){
-      long l1 = t1.getLong(Keys.FINISH_TIME) - 
-                t1.getLong(Keys.SHUFFLE_FINISHED); 
-      long l2 = t2.getLong(Keys.FINISH_TIME) - 
-                t2.getLong(Keys.SHUFFLE_FINISHED);
-      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
-    }
-  }; 
   Arrays.sort(reduceTasks, cReduce); 
-  JobHistory.Task minReduce = reduceTasks[reduceTasks.length-1] ;
+  JobHistoryParser.TaskAttemptInfo minReduce = reduceTasks[reduceTasks.length-1] ;
 %>
 <hr/>
 <h3>Time taken by best performing Reduce task : 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=minReduce.get(Keys.TASKID)%>">
-<%=minReduce.get(Keys.TASKID) %></a> : 
-<%=StringUtils.formatTimeDiff(minReduce.getLong(Keys.FINISH_TIME),
-    minReduce.getLong(Keys.SHUFFLE_FINISHED) ) %></h3>
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=minReduce.getAttemptId().getTaskID()%>">
+<%=minReduce.getAttemptId().getTaskID() %></a> : 
+<%=StringUtils.formatTimeDiff(minReduce.getFinishTime(),
+    minReduce.getShuffleFinishTime() ) %></h3>
 
 <h3>Average time taken by Reduce tasks: 
-<%=StringUtils.formatTimeDiff(avgReduceTime, 0) %></h3>
+<%=StringUtils.formatTimeDiff(avg.getAvgReduceTime(), 0) %></h3>
 <h3>Worse performing reduce tasks</h3>
 <table border="2" cellpadding="5" cellspacing="2">
 <tr><td>Task Id</td><td>Time taken</td></tr>
@@ -244,11 +222,11 @@
   for (int i=0;i<showTasks && i<reduceTasks.length; i++) {
 %>
     <tr>
-    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=reduceTasks[i].get(Keys.TASKID)%>">
-        <%=reduceTasks[i].get(Keys.TASKID) %></a></td>
+    <td><a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=reduceTasks[i].getAttemptId().getTaskID()%>">
+        <%=reduceTasks[i].getAttemptId().getTaskID() %></a></td>
     <td><%=StringUtils.formatTimeDiff(
-             reduceTasks[i].getLong(Keys.FINISH_TIME), 
-             reduceTasks[i].getLong(Keys.SHUFFLE_FINISHED)) %></td>
+             reduceTasks[i].getFinishTime(),
+             reduceTasks[i].getShuffleFinishTime()) %></td>
     </tr>
 <%
   }
@@ -256,15 +234,15 @@
 </table>
 <%  
   Arrays.sort(reduceTasks, cFinishMapRed);
-  JobHistory.Task lastReduce = reduceTasks[0] ;
+  JobHistoryParser.TaskAttemptInfo lastReduce = reduceTasks[0] ;
 %>
 
 <h3>The last Reduce task 
-<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>
-&taskid=<%=lastReduce.get(Keys.TASKID)%>"><%=lastReduce.get(Keys.TASKID)%>
+<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>
+&taskid=<%=lastReduce.getAttemptId().getTaskID()%>"><%=lastReduce.getAttemptId().getTaskID()%>
 </a> finished at (relative to the Job launch time): 
 <%=StringUtils.getFormattedTimeWithDiff(dateFormat,
-                              lastReduce.getLong(Keys.FINISH_TIME), 
-                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+                              lastReduce.getFinishTime(),
+                              job.getLaunchTime() ) %></h3>
 </center>
 </body></html>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf.jsp Sat Nov 28 20:26:01 2009
@@ -24,6 +24,7 @@
   import="java.io.*"
   import="java.net.URL"
   import="org.apache.hadoop.mapred.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*"
   import="org.apache.hadoop.util.*"
 %>
 <%!	private static final long serialVersionUID = 1L;
@@ -47,7 +48,7 @@
 <h2>Job Configuration: JobId - <%= jobId %></h2><br>
 
 <%
-  String jobFilePath = JobTracker.getLocalJobFilePath(JobID.forName(jobId));
+  String jobFilePath = tracker.getLocalJobFilePath(JobID.forName(jobId));
   FileInputStream jobFile = null;
   try {
     jobFile = new FileInputStream(jobFilePath);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf_history.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf_history.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf_history.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobconf_history.jsp Sat Nov 28 20:26:01 2009
@@ -26,6 +26,7 @@
   import="org.apache.hadoop.mapred.*"
   import="org.apache.hadoop.fs.*"
   import="org.apache.hadoop.util.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
 
 <%!	private static final long serialVersionUID = 1L;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobdetailshistory.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobdetailshistory.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobdetailshistory.jsp Sat Nov 28 20:26:01 2009
@@ -23,132 +23,53 @@
   import="java.io.*"
   import="java.util.*"
   import="org.apache.hadoop.fs.*"
+  import="org.apache.hadoop.mapreduce.TaskAttemptID"
+  import="org.apache.hadoop.mapreduce.TaskID"
+  import="org.apache.hadoop.mapreduce.Counter"
+  import="org.apache.hadoop.mapreduce.Counters"
+  import="org.apache.hadoop.mapreduce.CounterGroup"
   import="org.apache.hadoop.mapred.*"
   import="org.apache.hadoop.util.*"
-  import="java.text.SimpleDateFormat"
-  import="org.apache.hadoop.mapred.JobHistory.*"
+  import="java.text.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
-<%!	private static final long serialVersionUID = 1L;
+<%!private static final long serialVersionUID = 1L;
 %>
 
 <%! static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
 <%
     String jobid = request.getParameter("jobid");
     String logFile = request.getParameter("logFile");
-	String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
-	
+
     Path jobFile = new Path(logFile);
     String[] jobDetails = jobFile.getName().split("_");
-    String jobUniqueString = jobDetails[0] + "_" +jobDetails[1] + "_" + jobid ;
-	
+    String jobUniqueString = jobid;
+
     FileSystem fs = (FileSystem) application.getAttribute("fileSys");
-    JobInfo job = JSPUtil.getJobInfo(request, fs);
+    JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
 %>
-<html><body>
+
+<html>
+<head>
+<title>Hadoop Job <%=jobid%> on History Viewer</title>
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
+</head>
+<body>
+
 <h2>Hadoop Job <%=jobid %> on <a href="jobhistory.jsp">History Viewer</a></h2>
 
-<b>User: </b> <%=job.get(Keys.USER) %><br/> 
-<b>JobName: </b> <%=job.get(Keys.JOBNAME) %><br/> 
+<b>User: </b> <%=job.getUsername() %><br/> 
+<b>JobName: </b> <%=job.getJobname() %><br/> 
 <b>JobConf: </b> <a href="jobconf_history.jsp?jobid=<%=jobid%>&jobLogDir=<%=new Path(logFile).getParent().toString()%>&jobUniqueString=<%=jobUniqueString%>"> 
-                 <%=job.get(Keys.JOBCONF) %></a><br/> 
-<b>Submitted At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 )  %><br/> 
-<b>Launched At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %><br/>
-<b>Finished At: </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %><br/>
-<b>Status: </b> <%= ((job.get(Keys.JOB_STATUS) == "")?"Incomplete" :job.get(Keys.JOB_STATUS)) %><br/> 
+                 <%=job.getJobConfPath() %></a><br/> 
+<b>Submitted At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getSubmitTime(), 0 )  %><br/> 
+<b>Launched At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLaunchTime(), job.getSubmitTime()) %><br/>
+<b>Finished At: </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getFinishTime(), job.getLaunchTime()) %><br/>
+<b>Status: </b> <%= ((job.getJobStatus()) == null ? "Incomplete" :job.getJobStatus()) %><br/> 
 <%
-    Map<String, JobHistory.Task> tasks = job.getAllTasks();
-    int totalMaps = 0 ; 
-    int totalReduces = 0;
-    int totalCleanups = 0; 
-    int totalSetups = 0; 
-    int numFailedMaps = 0; 
-    int numKilledMaps = 0;
-    int numFailedReduces = 0 ; 
-    int numKilledReduces = 0;
-    int numFinishedCleanups = 0;
-    int numFailedCleanups = 0;
-    int numKilledCleanups = 0;
-    int numFinishedSetups = 0;
-    int numFailedSetups = 0;
-    int numKilledSetups = 0;
-	
-    long mapStarted = 0 ; 
-    long mapFinished = 0 ; 
-    long reduceStarted = 0 ; 
-    long reduceFinished = 0;
-    long cleanupStarted = 0;
-    long cleanupFinished = 0; 
-    long setupStarted = 0;
-    long setupFinished = 0; 
-        
-    Map <String,String> allHosts = new TreeMap<String,String>();
-    for (JobHistory.Task task : tasks.values()) {
-      Map<String, TaskAttempt> attempts = task.getTaskAttempts();
-      allHosts.put(task.get(Keys.HOSTNAME), "");
-      for (TaskAttempt attempt : attempts.values()) {
-        long startTime = attempt.getLong(Keys.START_TIME) ; 
-        long finishTime = attempt.getLong(Keys.FINISH_TIME) ; 
-        if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))){
-          if (mapStarted==0 || mapStarted > startTime ) {
-            mapStarted = startTime; 
-          }
-          if (mapFinished < finishTime ) {
-            mapFinished = finishTime ; 
-          }
-          totalMaps++; 
-          if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFailedMaps++; 
-          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numKilledMaps++;
-          }
-        } else if (Values.REDUCE.name().equals(task.get(Keys.TASK_TYPE))) {
-          if (reduceStarted==0||reduceStarted > startTime) {
-            reduceStarted = startTime ; 
-          }
-          if (reduceFinished < finishTime) {
-            reduceFinished = finishTime; 
-          }
-          totalReduces++; 
-          if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFailedReduces++;
-          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numKilledReduces++;
-          }
-        } else if (Values.CLEANUP.name().equals(task.get(Keys.TASK_TYPE))) {
-          if (cleanupStarted==0||cleanupStarted > startTime) {
-            cleanupStarted = startTime ; 
-          }
-          if (cleanupFinished < finishTime) {
-            cleanupFinished = finishTime; 
-          }
-          totalCleanups++; 
-          if (Values.SUCCESS.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFinishedCleanups++;
-          } else if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFailedCleanups++;
-          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numKilledCleanups++;
-          } 
-        } else if (Values.SETUP.name().equals(task.get(Keys.TASK_TYPE))) {
-          if (setupStarted==0||setupStarted > startTime) {
-            setupStarted = startTime ; 
-          }
-          if (setupFinished < finishTime) {
-            setupFinished = finishTime; 
-          }
-          totalSetups++; 
-          if (Values.SUCCESS.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFinishedSetups++;
-          } else if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numFailedSetups++;
-          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
-            numKilledSetups++;
-          }
-        }
-      }
-    }
+    HistoryViewer.SummarizedJob sj = new HistoryViewer.SummarizedJob(job);
 %>
-<b><a href="analysejobhistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>">Analyse This Job</a></b> 
+<b><a href="analysejobhistory.jsp?jobid=<%=jobid %>&logFile=<logFile%>">Analyse This Job</a></b> 
 <hr/>
 <center>
 <table border="2" cellpadding="5" cellspacing="2">
@@ -157,82 +78,148 @@
 </tr>
 <tr>
 <td>Setup</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=all">
-        <%=totalSetups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.SUCCESS %>">
-        <%=numFinishedSetups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.FAILED %>">
-        <%=numFailedSetups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.KILLED %>">
-        <%=numKilledSetups%></a></td>  
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, setupStarted, 0) %></td>
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, setupFinished, setupStarted) %></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=all">
+        <%=sj.getTotalSetups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=SUCCEEDED">
+        <%=sj.getNumFinishedSetups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=FAILED">
+        <%=sj.getNumFailedSetups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_SETUP&status=KILLED">
+        <%=sj.getNumKilledSetups()%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupStarted(), 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupFinished(), sj.getSetupStarted()) %></td>
 </tr>
 <tr>
 <td>Map</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=all">
-        <%=totalMaps %></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.SUCCESS %>">
-        <%=job.getInt(Keys.FINISHED_MAPS) %></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.FAILED %>">
-        <%=numFailedMaps %></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.KILLED %>">
-        <%=numKilledMaps %></a></td>
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, mapStarted, 0) %></td>
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, mapFinished, mapStarted) %></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=all">
+        <%=sj.getTotalMaps()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=SUCCEEDED">
+        <%=job.getFinishedMaps() %></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=FAILED">
+        <%=sj.getNumFailedMaps()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=MAP&status=KILLED">
+        <%=sj.getNumKilledMaps()%></a></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapStarted(), 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapFinished(), sj.getMapStarted()) %></td>
 </tr>
 <tr>
 <td>Reduce</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=all">
-        <%=totalReduces%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.SUCCESS %>">
-        <%=job.getInt(Keys.FINISHED_REDUCES)%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.FAILED %>">
-        <%=numFailedReduces%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.KILLED %>">
-        <%=numKilledReduces%></a></td>  
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, reduceStarted, 0) %></td>
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, reduceFinished, reduceStarted) %></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=all">
+        <%=sj.getTotalReduces()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=SUCCEEDED">
+        <%=job.getFinishedReduces()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=FAILED">
+        <%=sj.getNumFailedReduces()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=REDUCE&status=KILLED">
+        <%=sj.getNumKilledReduces()%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceStarted(), 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceFinished(), sj.getReduceStarted()) %></td>
 </tr>
 <tr>
 <td>Cleanup</td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=all">
-        <%=totalCleanups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.SUCCESS %>">
-        <%=numFinishedCleanups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.FAILED %>">
-        <%=numFailedCleanups%></a></td>
-    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.KILLED %>">
-        <%=numKilledCleanups%></a></td>  
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, cleanupStarted, 0) %></td>
-    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, cleanupFinished, cleanupStarted) %></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=all">
+        <%=sj.getTotalCleanups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=SUCCEEDED">
+        <%=sj.getNumFinishedCleanups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=FAILED">
+        <%=sj.getNumFailedCleanups()%></a></td>
+    <td><a href="jobtaskshistory.jsp?jobid=<%=jobid %>&logFile=<%=logFile%>&taskType=JOB_CLEANUP&status=KILLED">
+        <%=sj.getNumKilledCleanups()%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupStarted(), 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupFinished(), sj.getCleanupStarted()) %></td>
 </tr>
 </table>
 
+<br>
+<br>
+
+<table border=2 cellpadding="5" cellspacing="2">
+  <tr>
+  <th><br/></th>
+  <th>Counter</th>
+  <th>Map</th>
+  <th>Reduce</th>
+  <th>Total</th>
+</tr>
+
+<%  
+
+ Counters totalCounters = job.getTotalCounters();
+ Counters mapCounters = job.getMapCounters();
+ Counters reduceCounters = job.getReduceCounters();
+
+ if (totalCounters != null) {
+   for (String groupName : totalCounters.getGroupNames()) {
+     CounterGroup totalGroup = totalCounters.getGroup(groupName);
+     CounterGroup mapGroup = mapCounters.getGroup(groupName);
+     CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
+  
+     Format decimal = new DecimalFormat();
+  
+     boolean isFirst = true;
+     Iterator<Counter> ctrItr = totalGroup.iterator();
+     while(ctrItr.hasNext()) {
+       Counter counter = ctrItr.next();
+       String name = counter.getName();
+       String mapValue = 
+        decimal.format(mapGroup.findCounter(name).getValue());
+       String reduceValue = 
+        decimal.format(reduceGroup.findCounter(name).getValue());
+       String totalValue = 
+        decimal.format(counter.getValue());
+%>
+       <tr>
+<%
+       if (isFirst) {
+         isFirst = false;
+%>
+         <td rowspan="<%=totalGroup.size()%>"><%=totalGroup.getDisplayName()%></td>
+<%
+       }
+%>
+       <td><%=counter.getDisplayName()%></td>
+       <td align="right"><%=mapValue%></td>
+       <td align="right"><%=reduceValue%></td>
+       <td align="right"><%=totalValue%></td>
+     </tr>
+<%
+      }
+    }
+  }
+%>
+</table>
+<br>
+
 <br/>
  <%
-    DefaultJobHistoryParser.FailedOnNodesFilter filter = 
-                 new DefaultJobHistoryParser.FailedOnNodesFilter();
-    JobHistory.parseHistoryFromFS(logFile, filter, fs); 
-    Map<String, Set<String>> badNodes = filter.getValues(); 
+    HistoryViewer.FilteredJob filter = new HistoryViewer.FilteredJob(job,TaskStatus.State.FAILED.toString()); 
+    Map<String, Set<TaskID>> badNodes = filter.getFilteredMap(); 
     if (badNodes.size() > 0) {
  %>
 <h3>Failed tasks attempts by nodes </h3>
 <table border="1">
 <tr><td>Hostname</td><td>Failed Tasks</td></tr>
  <%	  
-      for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
+      for (Map.Entry<String, Set<TaskID>> entry : badNodes.entrySet()) {
         String node = entry.getKey();
-        Set<String> failedTasks = entry.getValue();
+        Set<TaskID> failedTasks = entry.getValue();
 %>
         <tr>
         <td><%=node %></td>
         <td>
 <%
-        for (String t : failedTasks) {
+          boolean firstId = true;
+          for (TaskID tid : failedTasks) {
+             if (firstId) {
+              firstId = false;
 %>
-          <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=t %>"><%=t %></a>,&nbsp;
+            <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+<%		  
+          } else {
+%>	
            ,&nbsp;<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
 <%		  
+          }
         }
 %>	
         </td>
@@ -243,29 +230,36 @@
  %>
 </table>
 <br/>
+
  <%
-    DefaultJobHistoryParser.KilledOnNodesFilter killedFilter =
-                 new DefaultJobHistoryParser.KilledOnNodesFilter();
-    JobHistory.parseHistoryFromFS(logFile, filter, fs); 
-    badNodes = killedFilter.getValues(); 
+    filter = new HistoryViewer.FilteredJob(job, TaskStatus.State.KILLED.toString());
+    badNodes = filter.getFilteredMap(); 
     if (badNodes.size() > 0) {
  %>
 <h3>Killed tasks attempts by nodes </h3>
 <table border="1">
 <tr><td>Hostname</td><td>Killed Tasks</td></tr>
  <%	  
-      for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
+      for (Map.Entry<String, Set<TaskID>> entry : badNodes.entrySet()) {
         String node = entry.getKey();
-        Set<String> killedTasks = entry.getValue();
+        Set<TaskID> killedTasks = entry.getValue();
 %>
         <tr>
         <td><%=node %></td>
         <td>
 <%
-        for (String t : killedTasks) {
+        boolean firstId = true;
+        for (TaskID tid : killedTasks) {
+             if (firstId) {
+              firstId = false;
 %>
-          <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=encodedLogFileName%>&taskid=<%=t %>"><%=t %></a>,&nbsp;
+            <a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
 <%		  
+          } else {
+%>	
            ,&nbsp;<a href="taskdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>&taskid=<%=tid %>"><%=tid %></a>
+<%		  
+          }
         }
 %>	
         </td>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobhistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobhistory.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobhistory.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobhistory.jsp Sat Nov 28 20:26:01 2009
@@ -27,7 +27,7 @@
   import="javax.servlet.jsp.*"
   import="java.text.SimpleDateFormat"
   import="org.apache.hadoop.mapred.*"
-  import="org.apache.hadoop.mapred.JobHistory.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
 
 <%!	private static final long serialVersionUID = 1L;
@@ -76,26 +76,31 @@
     final String user = (parts.length >= 1)
                         ? parts[0].toLowerCase()
                         : "";
-    final String jobname = (parts.length >= 2)
+    final String jobid = (parts.length >= 2)
                            ? parts[1].toLowerCase()
                            : "";
+
     PathFilter jobLogFileFilter = new PathFilter() {
       private boolean matchUser(String fileName) {
         // return true if 
         //  - user is not specified
         //  - user matches
-        return "".equals(user) || user.equals(fileName.split("_")[5]);
+        return "".equals(user) || user.equals(fileName.split("_")[3]);
       }
 
-      private boolean matchJobName(String fileName) {
+      private boolean matchJobId(String fileName) {
         // return true if 
-        //  - jobname is not specified
-        //  - jobname contains the keyword
-        return "".equals(jobname) || fileName.split("_")[6].toLowerCase().contains(jobname);
+        //  - jobid is not specified
+        //  - jobid matches 
+        String[] jobDetails = fileName.split("_");
+        String actualId = jobDetails[0] + "_" +jobDetails[1] + "_" + jobDetails[2] ;
+        return "".equals(jobid) || jobid.equalsIgnoreCase(actualId);
       }
 
       public boolean accept(Path path) {
-        return !(path.getName().endsWith(".xml")) && matchUser(path.getName()) && matchJobName(path.getName());
+        return (!(path.getName().endsWith(".xml") || 
+          path.getName().endsWith(JobHistory.OLD_SUFFIX)) && 
+          matchUser(path.getName()) && matchJobId(path.getName()));
       }
     };
     
@@ -107,7 +112,7 @@
     }
     Path[] jobFiles = FileUtil.stat2Paths(fs.listStatus(new Path(historyLogDir),
                                           jobLogFileFilter));
-    out.println("<!--  user : " + user + ", jobname : " + jobname + "-->");
+    out.println("<!--  user : " + user + ", jobid : " + jobid + "-->");
     if (null == jobFiles || jobFiles.length == 0)  {
       out.println("No files found!"); 
       return ; 
@@ -146,10 +151,11 @@
     }
 
     // Display the search box
-    out.println("<form name=search><b> Filter (username:jobname) </b>"); // heading
+    out.println("<form name=search><b> Filter (username:jobid) </b>"); // heading
     out.println("<input type=text name=search size=\"20\" value=\"" + search + "\">"); // search box
     out.println("<input type=submit value=\"Filter!\" onClick=\"showUserHistory(document.getElementById('search').value)\"></form>");
-    out.println("<span class=\"small\">Example: 'smith' will display jobs either submitted by user 'smith'. 'smith:sort' will display jobs from user 'smith' having 'sort' keyword in the jobname.</span>"); // example
+    out.println("<span class=\"small\">Example: 'smith' will display jobs submitted by user 'smith'. </span>");
+    out.println("<span class=\"small\">Job Ids need to be prefixed with a colon(:) For example, :job_200908311030_0001 will display the job with that id. </span>"); // example 
     out.println("<hr>");
 
     //Show the status
@@ -164,8 +170,8 @@
     if (!"".equals(user)) {
       out.println(" for user <b>" + user + "</b>"); // show the user if present
     }
-    if (!"".equals(jobname)) {
-      out.println(" with jobname having the keyword <b>" + jobname + "</b> in it."); // show the jobname keyword if present
+    if (!"".equals(jobid)) {
+      out.println(" for jobid <b>" + jobid + "</b>."); // show the jobid if present
     }
     out.print("</span></i>)");
 
@@ -192,12 +198,8 @@
         String dp1 = null;
         String dp2 = null;
         
-        try {
-          dp1 = JobHistory.JobInfo.decodeJobHistoryFileName(p1.getName());
-          dp2 = JobHistory.JobInfo.decodeJobHistoryFileName(p2.getName());
-        } catch (IOException ioe) {
-            throw new RuntimeException(ioe);
-        }
+        dp1 = p1.getName();
+        dp2 = p2.getName();
                 
         String[] split1 = dp1.split("_");
         String[] split2 = dp2.split("_");
@@ -206,12 +208,8 @@
         int res = new Date(Long.parseLong(split1[1])).compareTo(
                              new Date(Long.parseLong(split2[1])));
         if (res == 0) {
-          res = new Date(Long.parseLong(split1[3])).compareTo(
-                           new Date(Long.parseLong(split2[3])));
-        }
-        if (res == 0) {
-          Long l1 = Long.parseLong(split1[4]);
-          res = l1.compareTo(Long.parseLong(split2[4]));
+          Long l1 = Long.parseLong(split1[2]);
+          res = l1.compareTo(Long.parseLong(split2[2]));
         }
         return res;
       }
@@ -224,25 +222,18 @@
 
     out.print("<table align=center border=2 cellpadding=\"5\" cellspacing=\"2\">");
     out.print("<tr>");
-    out.print("<td>Job tracker Host Name</td>" +
-              "<td>Job tracker Start time</td>" +
-              "<td>Job Id</td><td>Name</td><td>User</td>") ; 
+    out.print( "<td>Job Id</td><td>User</td>") ; 
     out.print("</tr>"); 
     
     Set<String> displayedJobs = new HashSet<String>();
     for (int i = start - 1; i < start + length - 1; ++i) {
       Path jobFile = jobFiles[i];
       
-      String decodedJobFileName = 
-          JobHistory.JobInfo.decodeJobHistoryFileName(jobFile.getName());
+      String[] jobDetails = jobFile.getName().split("_");
+
+      String jobId = jobDetails[0] + "_" +jobDetails[1] + "_" + jobDetails[2] ;
+      String userName = jobDetails[3];
 
-      String[] jobDetails = decodedJobFileName.split("_");
-      String trackerHostName = jobDetails[0];
-      String trackerStartTime = jobDetails[1];
-      String jobId = jobDetails[2] + "_" +jobDetails[3] + "_" + jobDetails[4] ;
-      String userName = jobDetails[5];
-      String jobName = jobDetails[6];
-      
       // Check if the job is already displayed. There can be multiple job 
       // history files for jobs that have restarted
       if (displayedJobs.contains(jobId)) {
@@ -251,14 +242,10 @@
         displayedJobs.add(jobId);
       }
       
-      // Encode the logfile name again to cancel the decoding done by the browser
-      String encodedJobFileName = 
-          JobHistory.JobInfo.encodeJobHistoryFileName(jobFile.getName());
 %>
 <center>
 <%	
-      printJob(trackerHostName, trackerStartTime, jobId,
-               jobName, userName, new Path(jobFile.getParent(), encodedJobFileName), 
+      printJob(jobId, userName, new Path(jobFile.getParent(), jobFile), 
                out) ; 
 %>
 </center> 
@@ -270,16 +257,12 @@
     printNavigation(pageno, size, maxPageNo, search, out);
 %>
 <%!
-    private void printJob(String trackerHostName, String trackerid,
-                          String jobId, String jobName,
+    private void printJob(String jobId, 
                           String user, Path logFile, JspWriter out)
     throws IOException {
       out.print("<tr>"); 
-      out.print("<td>" + trackerHostName + "</td>"); 
-      out.print("<td>" + new Date(Long.parseLong(trackerid)) + "</td>"); 
       out.print("<td>" + "<a href=\"jobdetailshistory.jsp?jobid=" + jobId + 
                 "&logFile=" + logFile.toString() + "\">" + jobId + "</a></td>"); 
-      out.print("<td>" + jobName + "</td>"); 
       out.print("<td>" + user + "</td>"); 
       out.print("</tr>");
     }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobqueue_details.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobqueue_details.jsp?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobqueue_details.jsp (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/jobqueue_details.jsp Sat Nov 28 20:26:01 2009
@@ -1,22 +1,20 @@
-<%
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file 
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-%>
+<%/*
+       * Licensed to the Apache Software Foundation (ASF) under one
+       * or more contributor license agreements.  See the NOTICE file 
+       * distributed with this work for additional information
+       * regarding copyright ownership.  The ASF licenses this file
+       * to you under the Apache License, Version 2.0 (the
+       * "License"); you may not use this file except in compliance
+       * with the License.  You may obtain a copy of the License at
+       *
+       *     http://www.apache.org/licenses/LICENSE-2.0
+       *
+       * Unless required by applicable law or agreed to in writing, software
+       * distributed under the License is distributed on an "AS IS" BASIS,
+       * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+       * See the License for the specific language governing permissions and
+       * limitations under the License.
+       */%>
 <%@ page
   contentType="text/html; charset=UTF-8"
   import="javax.servlet.*"
@@ -27,28 +25,26 @@
   import="org.apache.hadoop.util.StringUtils"
   import="org.apache.hadoop.util.ServletUtil"
 %>
-<%!
-private static final long serialVersionUID = 526456771152222127L; 
-%>
+<%!private static final long serialVersionUID = 526456771152222127L;%>
 <%
-  JobTracker tracker = 
-    (JobTracker) application.getAttribute("job.tracker");
-  String trackerName = 
-    StringUtils.simpleHostname(tracker.getJobTrackerMachine());
-  String queueName = 
-    StringUtils.escapeHTML(request.getParameter("queueName"));
+  JobTracker tracker = (JobTracker) application.getAttribute("job.tracker");
+  String trackerName = StringUtils.simpleHostname(tracker
+      .getJobTrackerMachine());
+  String queueName = StringUtils.escapeHTML(request
+      .getParameter("queueName"));
   TaskScheduler scheduler = tracker.getTaskScheduler();
-  Collection<JobInProgress> jobs = scheduler.getJobs(queueName);
   JobQueueInfo schedInfo = tracker.getQueueInfo(queueName);
 %>
 <html>
 <head>
-<title>Queue details for <%=queueName!=null?queueName:""%> </title>
+<title>Queue details for <%=queueName != null ? queueName : ""%> </title>
 <link rel="stylesheet" type="text/css" href="/static/hadoop.css">
 <script type="text/javascript" src="/static/jobtracker.js"></script>
 </head>
 <body>
-<% JSPUtil.processButtons(request, response, tracker); %>
+<%
+  JSPUtil.processButtons(request, response, tracker);
+%>
 <%
   String schedulingInfoString = schedInfo.getSchedulingInfo();
 %>
@@ -56,35 +52,52 @@
   <a href="jobtracker.jsp"><%=trackerName%></a>
 </h1>
 <div>
-State : <%= schedInfo.getQueueState() %> <br/>
-Scheduling Information : <%= schedulingInfoString.replaceAll("\n","<br/>") %>
+Scheduling Information : <%=schedulingInfoString.replaceAll("\n", "<br/>")%>
 </div>
 <hr/>
 <%
-if(jobs == null || jobs.isEmpty()) {
+  if (schedInfo.getChildren() != null && schedInfo.getChildren().size() > 0) {
+%>
+Child Queues : 
+<%
+    for (JobQueueInfo childQueue : schedInfo.getChildren()) {
+      String[] childNameSplits = childQueue.getQueueName().split(":");
+      String childName = childNameSplits[childNameSplits.length -1];
+%>
+      <a href="jobqueue_details.jsp?queueName=<%=childQueue.getQueueName()%>">
+      <%=childName%></a>&nbsp;&nbsp;
+<%
+    }
+%>
+<br/>
+<%
+  } else {
+    Collection<JobInProgress> jobs = scheduler.getJobs(queueName);
+    if (jobs == null || jobs.isEmpty()) {
 %>
 <center>
-<h2> No Jobs found for the Queue :: <%=queueName!=null?queueName:""%> </h2>
+<h2> No Jobs found for the Queue :: <%=queueName != null ? queueName : ""%> </h2>
 <hr/>
 </center>
 <%
-}else {
+  } else {
 %>
 <center>
-<h2> Job Summary for the Queue :: <%=queueName!=null?queueName:"" %> </h2>
+<h2> Job Summary for the Queue :: <%=queueName != null ? queueName : ""%> </h2>
 </center>
 <div style="text-align: center;text-indent: center;font-style: italic;">
 (In the order maintained by the scheduler)
 </div>
 <br/>
 <hr/>
-<%=
-  JSPUtil.generateJobTable("Job List", jobs, 30, 0)
-%>
+<%=JSPUtil.generateJobTable("Job List", jobs, 30, 0)%>
 <hr>
-<% } %>
+<%
+  }
+  }
+%>
 
 <%
-out.println(ServletUtil.htmlFooter());
+  out.println(ServletUtil.htmlFooter());
 %>
 



Mime
View raw message