hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r588064 - in /lucene/hadoop/trunk: ./ src/c++/pipes/api/hadoop/ src/c++/pipes/impl/ src/examples/pipes/ src/examples/pipes/impl/ src/java/org/apache/hadoop/mapred/ src/webapps/job/
Date Wed, 24 Oct 2007 22:43:16 GMT
Author: omalley
Date: Wed Oct 24 15:43:14 2007
New Revision: 588064

URL: http://svn.apache.org/viewvc?rev=588064&view=rev
Log:
HADOOP-1245.  Use the mapred.tasktracker.tasks.maximum value configured on each
node rather than the global number configured on the job tracker. Contributed 
by Michael Bieniosek.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/c++/pipes/api/hadoop/Pipes.hh
    lucene/hadoop/trunk/src/c++/pipes/api/hadoop/TemplateFactory.hh
    lucene/hadoop/trunk/src/c++/pipes/impl/HadoopPipes.cc
    lucene/hadoop/trunk/src/examples/pipes/Makefile.am
    lucene/hadoop/trunk/src/examples/pipes/Makefile.in
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
    lucene/hadoop/trunk/src/webapps/job/jobtracker.jsp
    lucene/hadoop/trunk/src/webapps/job/machines.jsp

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Oct 24 15:43:14 2007
@@ -3,6 +3,13 @@
 
 Trunk (unreleased changes)
 
+  INCOMPATIBLE CHANGES
+
+    HADOOP-1245.  Use the mapred.tasktracker.tasks.maximum value
+    configured on each tasktracker when allocating tasks, instead of
+    the value configured on the jobtracker. InterTrackerProtocol
+    version changed from 5 to 6. (Michael Bieniosek via omalley)
+
   IMPROVEMENTS
 
     HADOOP-2045.  Change committer list on website to a table, so that

Modified: lucene/hadoop/trunk/src/c++/pipes/api/hadoop/Pipes.hh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/api/hadoop/Pipes.hh?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/pipes/api/hadoop/Pipes.hh (original)
+++ lucene/hadoop/trunk/src/c++/pipes/api/hadoop/Pipes.hh Wed Oct 24 15:43:14 2007
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #ifndef HADOOP_PIPES_HH
 #define HADOOP_PIPES_HH
 

Modified: lucene/hadoop/trunk/src/c++/pipes/api/hadoop/TemplateFactory.hh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/api/hadoop/TemplateFactory.hh?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/pipes/api/hadoop/TemplateFactory.hh (original)
+++ lucene/hadoop/trunk/src/c++/pipes/api/hadoop/TemplateFactory.hh Wed Oct 24 15:43:14 2007
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #ifndef HADOOP_PIPES_TEMPLATE_FACTORY_HH
 #define HADOOP_PIPES_TEMPLATE_FACTORY_HH
 

Modified: lucene/hadoop/trunk/src/c++/pipes/impl/HadoopPipes.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/impl/HadoopPipes.cc?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/pipes/impl/HadoopPipes.cc (original)
+++ lucene/hadoop/trunk/src/c++/pipes/impl/HadoopPipes.cc Wed Oct 24 15:43:14 2007
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include "hadoop/Pipes.hh"
 #include "hadoop/SerialUtils.hh"
 #include "hadoop/StringUtils.hh"

Modified: lucene/hadoop/trunk/src/examples/pipes/Makefile.am
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/pipes/Makefile.am?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/pipes/Makefile.am (original)
+++ lucene/hadoop/trunk/src/examples/pipes/Makefile.am Wed Oct 24 15:43:14 2007
@@ -19,7 +19,7 @@
 LDADD=-L$(HADOOP_UTILS_PREFIX)/lib -L$(HADOOP_PIPES_PREFIX)/lib \
       -lhadooppipes -lhadooputils
 
-bin_PROGRAMS= wordcount-simple wordcount-part wordcount-nopipe
+bin_PROGRAMS= wordcount-simple wordcount-part wordcount-nopipe pipes-sort
 
 # Define the sources for each program
 wordcount_simple_SOURCES = \
@@ -30,4 +30,7 @@
 
 wordcount_nopipe_SOURCES = \
 	impl/wordcount-nopipe.cc
+
+pipes_sort_SOURCES = \
+        impl/sort.cc
 

Modified: lucene/hadoop/trunk/src/examples/pipes/Makefile.in
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/pipes/Makefile.in?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/pipes/Makefile.in (original)
+++ lucene/hadoop/trunk/src/examples/pipes/Makefile.in Wed Oct 24 15:43:14 2007
@@ -14,7 +14,7 @@
 
 @SET_MAKE@
 
-SOURCES = $(wordcount_nopipe_SOURCES) $(wordcount_part_SOURCES) $(wordcount_simple_SOURCES)
+SOURCES = $(pipes_sort_SOURCES) $(wordcount_nopipe_SOURCES) $(wordcount_part_SOURCES) $(wordcount_simple_SOURCES)
 
 srcdir = @srcdir@
 top_srcdir = @top_srcdir@
@@ -38,7 +38,7 @@
 POST_UNINSTALL = :
 host_triplet = @host@
 bin_PROGRAMS = wordcount-simple$(EXEEXT) wordcount-part$(EXEEXT) \
-	wordcount-nopipe$(EXEEXT)
+	wordcount-nopipe$(EXEEXT) pipes-sort$(EXEEXT)
 DIST_COMMON = config.guess config.sub $(srcdir)/Makefile.in \
 	$(srcdir)/Makefile.am $(top_srcdir)/configure \
 	$(am__configure_deps) $(top_srcdir)/impl/config.h.in depcomp \
@@ -59,6 +59,10 @@
 binPROGRAMS_INSTALL = $(INSTALL_PROGRAM)
 PROGRAMS = $(bin_PROGRAMS)
 am__dirstamp = $(am__leading_dot)dirstamp
+am_pipes_sort_OBJECTS = impl/sort.$(OBJEXT)
+pipes_sort_OBJECTS = $(am_pipes_sort_OBJECTS)
+pipes_sort_LDADD = $(LDADD)
+pipes_sort_DEPENDENCIES =
 am_wordcount_nopipe_OBJECTS = impl/wordcount-nopipe.$(OBJEXT)
 wordcount_nopipe_OBJECTS = $(am_wordcount_nopipe_OBJECTS)
 wordcount_nopipe_LDADD = $(LDADD)
@@ -82,8 +86,8 @@
 CXXLD = $(CXX)
 CXXLINK = $(LIBTOOL) --mode=link --tag=CXX $(CXXLD) $(AM_CXXFLAGS) \
 	$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
-SOURCES = $(wordcount_nopipe_SOURCES) $(wordcount_part_SOURCES) \
-	$(wordcount_simple_SOURCES)
+SOURCES = $(pipes_sort_SOURCES) $(wordcount_nopipe_SOURCES) \
+	$(wordcount_part_SOURCES) $(wordcount_simple_SOURCES)
 ETAGS = etags
 CTAGS = ctags
 ACLOCAL = @ACLOCAL@
@@ -218,6 +222,9 @@
 wordcount_nopipe_SOURCES = \
 	impl/wordcount-nopipe.cc
 
+pipes_sort_SOURCES = \
+        impl/sort.cc
+
 all: all-am
 
 .SUFFIXES:
@@ -306,6 +313,11 @@
 impl/$(DEPDIR)/$(am__dirstamp):
 	@$(mkdir_p) impl/$(DEPDIR)
 	@: > impl/$(DEPDIR)/$(am__dirstamp)
+impl/sort.$(OBJEXT): impl/$(am__dirstamp) \
+	impl/$(DEPDIR)/$(am__dirstamp)
+pipes-sort$(EXEEXT): $(pipes_sort_OBJECTS) $(pipes_sort_DEPENDENCIES) 
+	@rm -f pipes-sort$(EXEEXT)
+	$(CXXLINK) $(pipes_sort_LDFLAGS) $(pipes_sort_OBJECTS) $(pipes_sort_LDADD) $(LIBS)
 impl/wordcount-nopipe.$(OBJEXT): impl/$(am__dirstamp) \
 	impl/$(DEPDIR)/$(am__dirstamp)
 wordcount-nopipe$(EXEEXT): $(wordcount_nopipe_OBJECTS) $(wordcount_nopipe_DEPENDENCIES) 
@@ -324,6 +336,7 @@
 
 mostlyclean-compile:
 	-rm -f *.$(OBJEXT)
+	-rm -f impl/sort.$(OBJEXT)
 	-rm -f impl/wordcount-nopipe.$(OBJEXT)
 	-rm -f impl/wordcount-part.$(OBJEXT)
 	-rm -f impl/wordcount-simple.$(OBJEXT)
@@ -331,6 +344,7 @@
 distclean-compile:
 	-rm -f *.tab.c
 
+@AMDEP_TRUE@@am__include@ @am__quote@impl/$(DEPDIR)/sort.Po@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@impl/$(DEPDIR)/wordcount-nopipe.Po@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@impl/$(DEPDIR)/wordcount-part.Po@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@impl/$(DEPDIR)/wordcount-simple.Po@am__quote@

Modified: lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc (original)
+++ lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc Wed Oct 24 15:43:14 2007
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #include "hadoop/Pipes.hh"
 #include "hadoop/TemplateFactory.hh"
 #include "hadoop/StringUtils.hh"

Modified: lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc (original)
+++ lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc Wed Oct 24 15:43:14 2007
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include "hadoop/Pipes.hh"
 #include "hadoop/TemplateFactory.hh"
 #include "hadoop/StringUtils.hh"

Modified: lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc (original)
+++ lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc Wed Oct 24 15:43:14 2007
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include "hadoop/Pipes.hh"
 #include "hadoop/TemplateFactory.hh"
 #include "hadoop/StringUtils.hh"

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java Wed Oct 24 15:43:14 2007
@@ -34,8 +34,9 @@
    * version 4 changed TaskReport for HADOOP-549.
    * version 5 introduced that removes locateMapOutputs and instead uses
    * getTaskCompletionEvents to figure finished maps and fetch the outputs
+   * version 6 adds maxTasks to TaskTrackerStatus for HADOOP-1245
    */
-  public static final long versionID = 5L;
+  public static final long versionID = 6L;
   
   public final static int TRACKERS_OK = 0;
   public final static int UNKNOWN_TASKTRACKER = 1;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java Wed Oct 24 15:43:14 2007
@@ -71,17 +71,23 @@
    * Record types are identifiers for each line of log in history files. 
    * A record type appears as the first token in a single line of log. 
    */
-  public static enum RecordTypes {Jobtracker, Job, Task, MapAttempt, ReduceAttempt};
+  public static enum RecordTypes {
+    Jobtracker, Job, Task, MapAttempt, ReduceAttempt
+  }
+
   /**
    * Job history files contain key="value" pairs, where keys belong to this enum. 
    * It acts as a global namespace for all keys. 
    */
-  public static enum Keys { JOBTRACKERID,
-                            START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME, LAUNCH_TIME, 
-                            TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES, FINISHED_MAPS, FINISHED_REDUCES,
-                            JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE, ERROR, TASK_ATTEMPT_ID, TASK_STATUS, 
-                            COPY_PHASE, SORT_PHASE, REDUCE_PHASE, SHUFFLE_FINISHED, SORT_FINISHED 
-  };
+  public static enum Keys { 
+    JOBTRACKERID,
+    START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME, 
+    LAUNCH_TIME, TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES, 
+    FINISHED_MAPS, FINISHED_REDUCES, JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE, 
+    ERROR, TASK_ATTEMPT_ID, TASK_STATUS, COPY_PHASE, SORT_PHASE, REDUCE_PHASE, 
+    SHUFFLE_FINISHED, SORT_FINISHED, COUNTERS
+  }
+
   /**
    * This enum contains some of the values commonly used by history log events. 
    * since values in history can only be strings - Values.name() is used in 
@@ -89,7 +95,8 @@
    */
   public static enum Values {
     SUCCESS, FAILED, KILLED, MAP, REDUCE
-  };
+  }
+
  // temp buffer for parsed data
   private static Map<Keys,String> parseBuffer = new HashMap<Keys, String>(); 
 
@@ -181,7 +188,8 @@
    * @param value value
    */
   
-  static void log(PrintWriter out, RecordTypes recordType, Enum key, String value){
+  static void log(PrintWriter out, RecordTypes recordType, Keys key, 
+                  String value){
     out.println(recordType.name() + DELIMITER + key + "=\"" + value + "\""); 
     out.flush();
   }
@@ -194,7 +202,8 @@
    * @param values type of log event
    */
 
-  static void log(PrintWriter out, RecordTypes recordType, Enum[] keys, String[] values){
+  static void log(PrintWriter out, RecordTypes recordType, Keys[] keys, 
+                  String[] values){
     StringBuffer buf = new StringBuffer(recordType.name()); 
     buf.append(DELIMITER); 
     for(int i =0; i< keys.length; i++){
@@ -340,7 +349,7 @@
       if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job, 
-                         new Enum[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
+                         new Keys[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
                          new String[]{jobId, jobName, user, 
                                       String.valueOf(submitTime), jobConfPath}
                         );
@@ -354,7 +363,7 @@
           openJobs.put(logFileName, writer);
           // add to writer as well 
           JobHistory.log(writer, RecordTypes.Job, 
-                         new Enum[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
+                         new Keys[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
                          new String[]{jobId, jobName, user, 
                                       String.valueOf(submitTime) , jobConfPath}
                         ); 
@@ -389,7 +398,7 @@
       if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job, 
-                         new Enum[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
+                         new Keys[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
                          new String[] {jobId,  String.valueOf(startTime), 
                                        String.valueOf(totalMaps), String.valueOf(totalReduces) }); 
         }
@@ -399,7 +408,7 @@
         
         if (null != writer){
           JobHistory.log(writer, RecordTypes.Job, 
-                         new Enum[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
+                         new Keys[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
                          new String[] {jobId,  String.valueOf(startTime), String.valueOf(totalMaps), String.valueOf(totalReduces)}); 
         }
       }
@@ -412,15 +421,22 @@
     * @param finishedReduces no of reduces finished successfully. 
      * @param failedMaps no of failed map tasks. 
      * @param failedReduces no of failed reduce tasks. 
+     * @param counters the counters from the job
      */ 
-    public static void logFinished(String jobId, long finishTime, int finishedMaps, int finishedReduces,
-                                   int failedMaps, int failedReduces){
+    public static void logFinished(String jobId, long finishTime, 
+                                   int finishedMaps, int finishedReduces,
+                                   int failedMaps, int failedReduces,
+                                   Counters counters){
       if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job,          
-                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
-                         new String[] {jobId,  "" + finishTime, Values.SUCCESS.name(), 
-                                       String.valueOf(finishedMaps), String.valueOf(finishedReduces) });
+                         new Keys[] {Keys.JOBID, Keys.FINISH_TIME, 
+                                     Keys.JOB_STATUS, Keys.FINISHED_MAPS, 
+                                     Keys.FINISHED_REDUCES},
+                         new String[] {jobId,  "" + finishTime, 
+                                       Values.SUCCESS.name(), 
+                                       String.valueOf(finishedMaps), 
+                                       String.valueOf(finishedReduces)});
         }
         
         // close job file for this job
@@ -428,11 +444,18 @@
         PrintWriter writer = openJobs.get(logFileName); 
         if (null != writer){
           JobHistory.log(writer, RecordTypes.Job,          
-                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES,
-                                     Keys.FAILED_MAPS, Keys.FAILED_REDUCES},
-                         new String[] {jobId,  "" + finishTime, Values.SUCCESS.name(), 
-                                       String.valueOf(finishedMaps), String.valueOf(finishedReduces),
-                                       String.valueOf(failedMaps), String.valueOf(failedReduces)});
+                         new Keys[] {Keys.JOBID, Keys.FINISH_TIME, 
+                                     Keys.JOB_STATUS, Keys.FINISHED_MAPS, 
+                                     Keys.FINISHED_REDUCES,
+                                     Keys.FAILED_MAPS, Keys.FAILED_REDUCES,
+                                     Keys.COUNTERS},
+                         new String[] {jobId,  Long.toString(finishTime), 
+                                       Values.SUCCESS.name(), 
+                                       String.valueOf(finishedMaps), 
+                                       String.valueOf(finishedReduces),
+                                       String.valueOf(failedMaps), 
+                                       String.valueOf(failedReduces),
+                                       stringifyCounters(counters)});
           writer.close();
           openJobs.remove(logFileName); 
         }
@@ -451,7 +474,7 @@
       if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job,
-                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
+                         new Keys[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
                          new String[] {jobid,  String.valueOf(timestamp), Values.FAILED.name(), String.valueOf(finishedMaps), 
                                        String.valueOf(finishedReduces)}); 
         }
@@ -459,7 +482,7 @@
         PrintWriter writer = openJobs.get(logFileName); 
         if (null != writer){
           JobHistory.log(writer, RecordTypes.Job,
-                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
+                         new Keys[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
                          new String[] {jobid,  String.valueOf(timestamp), Values.FAILED.name(), String.valueOf(finishedMaps), 
                                        String.valueOf(finishedReduces)}); 
           writer.close();
@@ -488,7 +511,8 @@
       if (!disableHistory){
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
         if (null != writer){
-          JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE , Keys.START_TIME}, 
+          JobHistory.log(writer, RecordTypes.Task, 
+                         new Keys[]{Keys.TASKID, Keys.TASK_TYPE , Keys.START_TIME}, 
                          new String[]{taskId, taskType, String.valueOf(startTime)});
         }
       }
@@ -501,13 +525,17 @@
     * @param finishTime finish time of task in ms
      */
     public static void logFinished(String jobId, String taskId, String taskType, 
-                                   long finishTime){
+                                   long finishTime, Counters counters){
       if (!disableHistory){
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
         if (null != writer){
-          JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE, 
-                                                              Keys.TASK_STATUS, Keys.FINISH_TIME}, 
-                         new String[]{ taskId, taskType, Values.SUCCESS.name(), String.valueOf(finishTime)});
+          JobHistory.log(writer, RecordTypes.Task, 
+                         new Keys[]{Keys.TASKID, Keys.TASK_TYPE, 
+                                    Keys.TASK_STATUS, Keys.FINISH_TIME,
+                                    Keys.COUNTERS}, 
+                         new String[]{ taskId, taskType, Values.SUCCESS.name(), 
+                                       String.valueOf(finishTime),
+                                       stringifyCounters(counters)});
         }
       }
     }
@@ -523,8 +551,9 @@
       if (!disableHistory){
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
         if (null != writer){
-          JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE, 
-                                                              Keys.TASK_STATUS, Keys.FINISH_TIME, Keys.ERROR}, 
+          JobHistory.log(writer, RecordTypes.Task, 
+                         new Keys[]{Keys.TASKID, Keys.TASK_TYPE, 
+                                    Keys.TASK_STATUS, Keys.FINISH_TIME, Keys.ERROR}, 
                          new String[]{ taskId,  taskType, Values.FAILED.name(), String.valueOf(time) , error});
         }
       }
@@ -560,8 +589,9 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.MapAttempt, 
-                         new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, 
-                                     Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
+                         new Keys[]{ Keys.TASK_TYPE, Keys.TASKID, 
+                                     Keys.TASK_ATTEMPT_ID, Keys.START_TIME, 
+                                     Keys.HOSTNAME},
                          new String[]{Values.MAP.name(),  taskId, 
                                       taskAttemptId, String.valueOf(startTime), hostName}); 
         }
@@ -575,18 +605,22 @@
      * @param finishTime finish time
      * @param hostName host name 
      */
-    public static void logFinished(String jobId, String taskId, String taskAttemptId, long finishTime, String hostName){
+    public static void logFinished(String jobId, String taskId, 
+                                   String taskAttemptId, long finishTime, 
+                                   String hostName){
       if (!disableHistory){
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.MapAttempt, 
-                         new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                         new Keys[]{ Keys.TASK_TYPE, Keys.TASKID, 
+                                     Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
                                      Keys.FINISH_TIME, Keys.HOSTNAME},
                          new String[]{Values.MAP.name(), taskId, taskAttemptId, Values.SUCCESS.name(),  
                                       String.valueOf(finishTime), hostName}); 
         }
       }
     }
+
     /**
      * Log task attempt failed event.  
      * @param jobId jobid
@@ -602,7 +636,7 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.MapAttempt, 
-                         new Enum[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                         new Keys[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
                                     Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR},
                          new String[]{ Values.MAP.name(), taskId, taskAttemptId, Values.FAILED.name(),
                                        String.valueOf(timestamp), hostName, error}); 
@@ -624,7 +658,7 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.MapAttempt, 
-                         new Enum[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                         new Keys[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
                                     Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR},
                          new String[]{ Values.MAP.name(), taskId, taskAttemptId, Values.KILLED.name(),
                                        String.valueOf(timestamp), hostName, error}); 
@@ -651,7 +685,7 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.ReduceAttempt, 
-                         new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, 
+                         new Keys[]{  Keys.TASK_TYPE, Keys.TASKID, 
                                       Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
                          new String[]{Values.REDUCE.name(),  taskId, 
                                       taskAttemptId, String.valueOf(startTime), hostName}); 
@@ -668,14 +702,18 @@
      * @param finishTime finish time of task
      * @param hostName host name where task attempt executed
      */
-    public static void logFinished(String jobId, String taskId, String taskAttemptId, 
-                                   long shuffleFinished, long sortFinished, long finishTime, String hostName){
+    public static void logFinished(String jobId, String taskId, 
+                                   String taskAttemptId, long shuffleFinished, 
+                                   long sortFinished, long finishTime, 
+                                   String hostName){
       if (!disableHistory){
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.ReduceAttempt, 
-                         new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
-                                     Keys.SHUFFLE_FINISHED, Keys.SORT_FINISHED, Keys.FINISH_TIME, Keys.HOSTNAME},
+                         new Keys[]{ Keys.TASK_TYPE, Keys.TASKID, 
+                                     Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                                     Keys.SHUFFLE_FINISHED, Keys.SORT_FINISHED,
+                                     Keys.FINISH_TIME, Keys.HOSTNAME},
                          new String[]{Values.REDUCE.name(),  taskId, taskAttemptId, Values.SUCCESS.name(), 
                                       String.valueOf(shuffleFinished), String.valueOf(sortFinished),
                                       String.valueOf(finishTime), hostName}); 
@@ -697,7 +735,7 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.ReduceAttempt, 
-                         new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                         new Keys[]{  Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
                                       Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR },
                          new String[]{ Values.REDUCE.name(), taskId, taskAttemptId, Values.FAILED.name(), 
                                        String.valueOf(timestamp), hostName, error }); 
@@ -719,8 +757,10 @@
         PrintWriter writer = openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
         if (null != writer){
           JobHistory.log(writer, RecordTypes.ReduceAttempt, 
-                         new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
-                                      Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR },
+                         new Keys[]{  Keys.TASK_TYPE, Keys.TASKID, 
+                                      Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                                      Keys.FINISH_TIME, Keys.HOSTNAME, 
+                                      Keys.ERROR },
                          new String[]{ Values.REDUCE.name(), taskId, taskAttemptId, Values.KILLED.name(), 
                                        String.valueOf(timestamp), hostName, error }); 
         }
@@ -728,6 +768,33 @@
     }
 
   }
+  
+  /**
+   * Convert a counters object into a string
+   * @param counters the counters to stringify
+   * @return the resulting string
+   */
+  private static String stringifyCounters(Counters counters) {
+    StringBuffer buffer = new StringBuffer();
+    for(String groupName: counters.getGroupNames()){
+      Counters.Group group = counters.getGroup(groupName);
+      boolean first = true;
+      for(String counterName: group.getCounterNames()) {
+        if (first) {
+          first = false;
+        } else {
+          buffer.append(',');
+        }
+        buffer.append(groupName);
+        buffer.append('.');
+        buffer.append(counterName);
+        buffer.append('=');
+        buffer.append(group.getCounter(counterName));
+      }
+    }
+    return buffer.toString();
+  }
+
   /**
    * Callback interface for reading back log events from JobHistory. This interface 
    * should be implemented and passed to JobHistory.parseHistory() 
@@ -782,16 +849,21 @@
           // find job that started more than one month back and remove them
           // for jobtracker instances which dont have a job in past one month 
           // remove the jobtracker start timestamp as well.
-          for (Map<String, JobHistory.JobInfo> jobs : 
-                  jobTrackersToJobs.values()) {
-            for(Iterator iter = jobs.keySet().iterator(); iter.hasNext(); iter.next()){
-              JobHistory.JobInfo job = jobs.get(iter.next());
-              if (now - job.getLong(Keys.SUBMIT_TIME) > THIRTY_DAYS_IN_MS) {
-                iter.remove(); 
-              }
-              if (jobs.size() == 0){
-                iter.remove(); 
+          Iterator<Map<String, JobHistory.JobInfo>> jobTrackerItr =
+            jobTrackersToJobs.values().iterator();
+          while (jobTrackerItr.hasNext()) {
+            Map<String, JobHistory.JobInfo> jobs = jobTrackerItr.next();
+            Iterator<Map.Entry<String, JobHistory.JobInfo>> jobItr = 
+                   jobs.entrySet().iterator();
+            while (jobItr.hasNext()) {
+              Map.Entry<String, JobHistory.JobInfo> item = jobItr.next();
+              if (now - item.getValue().getLong(Keys.SUBMIT_TIME) > 
+                  THIRTY_DAYS_IN_MS) {
+                jobItr.remove(); 
               }
+            }
+            if (jobs.size() == 0){
+              jobTrackerItr.remove(); 
             }
           }
           masterIndex.close(); 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Wed Oct 24 15:43:14 2007
@@ -797,7 +797,8 @@
                                         tip.getTIPId(), status.getTaskId(), status.getFinishTime(), 
                                         taskTrackerName); 
       JobHistory.Task.logFinished(profile.getJobId(), tip.getTIPId(), 
-                                  Values.MAP.name(), status.getFinishTime()); 
+                                  Values.MAP.name(), status.getFinishTime(),
+                                  status.getCounters()); 
     }else{
       JobHistory.ReduceAttempt.logStarted(profile.getJobId(), 
                                           tip.getTIPId(), status.getTaskId(), status.getStartTime(), 
@@ -807,7 +808,8 @@
                                            status.getSortFinishTime(), status.getFinishTime(), 
                                            taskTrackerName); 
       JobHistory.Task.logFinished(profile.getJobId(), tip.getTIPId(), 
-                                  Values.REDUCE.name(), status.getFinishTime()); 
+                                  Values.REDUCE.name(), status.getFinishTime(),
+                                  status.getCounters()); 
     }
         
     // Update the running/finished map/reduce counts
@@ -875,7 +877,9 @@
       LOG.info("Job " + this.status.getJobId() + 
                " has completed successfully.");
       JobHistory.JobInfo.logFinished(this.status.getJobId(), finishTime, 
-                                     this.finishedMapTasks, this.finishedReduceTasks, failedMapTasks, failedReduceTasks);
+                                     this.finishedMapTasks, 
+                                     this.finishedReduceTasks, failedMapTasks, 
+                                     failedReduceTasks, getCounters());
       metrics.completeJob();
       return true;
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Wed Oct 24 15:43:14 2007
@@ -507,7 +507,7 @@
   long startTime;
   int totalSubmissions = 0;
 
-  private int maxCurrentTasks;
+  private int totalTaskCapacity;
   private HostsFileReader hostsReader;
 
   //
@@ -621,7 +621,6 @@
     //
     TASKTRACKER_EXPIRY_INTERVAL = 
       conf.getLong("mapred.tasktracker.expiry.interval", 10 * 60 * 1000);
-    maxCurrentTasks = conf.getInt("mapred.tasktracker.tasks.maximum", 2);
     RETIRE_JOB_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.interval", 24 * 60 * 60 * 1000);
     RETIRE_JOB_CHECK_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.check", 60 * 1000);
     TASK_ALLOC_EPSILON = conf.getFloat("mapred.jobtracker.taskalloc.loadbalance.epsilon", 0.2f);
@@ -1231,6 +1230,7 @@
     if (oldStatus != null) {
       totalMaps -= oldStatus.countMapTasks();
       totalReduces -= oldStatus.countReduceTasks();
+      totalTaskCapacity -= oldStatus.getMaxTasks();
       if (status == null) {
         taskTrackers.remove(trackerName);
       }
@@ -1238,6 +1238,7 @@
     if (status != null) {
       totalMaps += status.countMapTasks();
       totalReduces += status.countReduceTasks();
+      totalTaskCapacity += status.getMaxTasks();
       taskTrackers.put(trackerName, status);
     }
     return oldStatus != null;
@@ -1297,7 +1298,7 @@
     int remainingMapLoad = 0;
     int numTaskTrackers;
     TaskTrackerStatus tts;
-	
+
     synchronized (taskTrackers) {
       numTaskTrackers = taskTrackers.size();
       tts = taskTrackers.get(taskTracker);
@@ -1306,7 +1307,6 @@
       LOG.warn("Unknown task tracker polling; ignoring: " + taskTracker);
       return null;
     }
-    int totalCapacity = numTaskTrackers * maxCurrentTasks;
 
     synchronized(jobsByPriority){
       for (Iterator it = jobsByPriority.iterator(); it.hasNext();) {
@@ -1320,6 +1320,8 @@
       }   
     }
 
+    int maxCurrentTasks = tts.getMaxTasks();
+    
     // find out the maximum number of maps or reduces that we are willing
     // to run on any node.
     int maxMapLoad = 0;
@@ -1381,7 +1383,7 @@
             padding = Math.min(maxCurrentTasks,
                                (int)(totalNeededMaps * PAD_FRACTION));
           }
-          if (totalMaps + padding >= totalCapacity) {
+          if (totalMaps + padding >= totalTaskCapacity) {
             break;
           }
         }
@@ -1419,7 +1421,7 @@
               Math.min(maxCurrentTasks,
                        (int) (totalNeededReduces * PAD_FRACTION));
           }
-          if (totalReduces + padding >= totalCapacity) {
+          if (totalReduces + padding >= totalTaskCapacity) {
             break;
           }
         }
@@ -1575,7 +1577,7 @@
       return new ClusterStatus(taskTrackers.size(),
                                totalMaps,
                                totalReduces,
-                               maxCurrentTasks,
+                               totalTaskCapacity,
                                state);          
     }
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Wed Oct 24 15:43:14 2007
@@ -391,7 +391,7 @@
     this.reduceTotal = 0;
     this.acceptNewTasks = true;
     this.status = null;
-        
+
     this.minSpaceStart = this.fConf.getLong("mapred.local.dir.minspacestart", 0L);
     this.minSpaceKill = this.fConf.getLong("mapred.local.dir.minspacekill", 0L);
     int numCopiers = this.fConf.getInt("mapred.reduce.parallel.copies", 5);
@@ -860,7 +860,7 @@
       synchronized (this) {
         status = new TaskTrackerStatus(taskTrackerName, localHostname, 
                                        httpPort, cloneAndResetRunningTaskStatuses(), 
-                                       failures); 
+                                       failures, maxCurrentTasks); 
       }
     } else {
       LOG.info("Resending 'status' to '" + jobTrackAddr.getHostName() +

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java Wed Oct 24 15:43:14 2007
@@ -46,6 +46,7 @@
   List<TaskStatus> taskReports;
     
   volatile long lastSeen;
+  int maxTasks;
     
   /**
    */
@@ -57,13 +58,15 @@
    */
   public TaskTrackerStatus(String trackerName, String host, 
                            int httpPort, List<TaskStatus> taskReports, 
-                           int failures) {
+                           int failures, int maxTasks) {
     this.trackerName = trackerName;
     this.host = host;
     this.httpPort = httpPort;
 
     this.taskReports = new ArrayList<TaskStatus>(taskReports);
     this.failures = failures;
+
+    this.maxTasks = maxTasks;
   }
 
   /**
@@ -149,6 +152,16 @@
     this.lastSeen = lastSeen;
   }
 
+  /**
+   * Get the maximum concurrent tasks for this node.  (This applies
+   * per type of task - a node with maxTasks==1 will run up to 1 map
+   * and 1 reduce concurrently).
+   * @return maximum tasks this node supports
+   */
+  public int getMaxTasks() {
+    return maxTasks;
+  }
+  
   ///////////////////////////////////////////
   // Writable
   ///////////////////////////////////////////
@@ -157,6 +170,7 @@
     UTF8.writeString(out, host);
     out.writeInt(httpPort);
     out.writeInt(failures);
+    out.writeInt(maxTasks);
 
     out.writeInt(taskReports.size());
     for (TaskStatus taskStatus : taskReports) {
@@ -169,6 +183,7 @@
     this.host = UTF8.readString(in);
     this.httpPort = in.readInt();
     this.failures = in.readInt();
+    this.maxTasks = in.readInt();
 
     taskReports.clear();
     int numTasks = in.readInt();

Modified: lucene/hadoop/trunk/src/webapps/job/jobtracker.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/jobtracker.jsp?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/webapps/job/jobtracker.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/job/jobtracker.jsp Wed Oct 24 15:43:14 2007
@@ -67,15 +67,21 @@
   public void generateSummaryTable(JspWriter out,
                                    JobTracker tracker) throws IOException {
     ClusterStatus status = tracker.getClusterStatus();
+    String tasksPerNode = status.getTaskTrackers() > 0 ?
+      percentFormat.format(((double)status.getMaxTasks()) / status.getTaskTrackers()) :
+      "-";
     out.print("<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\">\n"+
               "<tr><th>Maps</th><th>Reduces</th>" + 
-              "<th>Tasks/Node</th><th>Total Submissions</th>" +
-              "<th>Nodes</th></tr>\n");
+              "<th>Total Submissions</th>" +
+              "<th>Nodes</th><th>Task Capacity</th><th>Avg. Tasks/Node</th></tr>\n");
     out.print("<tr><td>" + status.getMapTasks() + "</td><td>" +
               status.getReduceTasks() + "</td><td>" + 
-              status.getMaxTasks() + "</td><td>" +
-              tracker.getTotalSubmissions() + "</td><td><a href=\"machines.jsp\">" +
-              status.getTaskTrackers() + "</a></td></tr></table>\n");
+              tracker.getTotalSubmissions() +
+              "</td><td><a href=\"machines.jsp\">" +
+              status.getTaskTrackers() +
+              "</a></td><td>" + status.getMaxTasks() +
+	      "</td><td>" + tasksPerNode +
+              "</td></tr></table>\n");
   }%>
 
 <%@page import="org.apache.hadoop.dfs.JspHelper"%>

Modified: lucene/hadoop/trunk/src/webapps/job/machines.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/machines.jsp?rev=588064&r1=588063&r2=588064&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/webapps/job/machines.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/job/machines.jsp Wed Oct 24 15:43:14 2007
@@ -23,9 +23,10 @@
     } else {
       out.print("<center>\n");
       out.print("<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\">\n");
-      out.print("<tr><td align=\"center\" colspan=\"5\"><b>Task Trackers</b></td></tr>\n");
+      out.print("<tr><td align=\"center\" colspan=\"6\"><b>Task Trackers</b></td></tr>\n");
       out.print("<tr><td><b>Name</b></td><td><b>Host</b></td>" +
-                "<td><b># running tasks</b></td><td><b>Failures</b></td>" +
+                "<td><b># running tasks</b></td><td><b>Max Tasks</b></td>" +
+                "<td><b>Failures</b></td>" +
                 "<td><b>Seconds since heartbeat</b></td></tr>\n");
       int maxFailures = 0;
       String failureKing = null;
@@ -49,6 +50,7 @@
         out.print(tt.getHost() + ":" + tt.getHttpPort() + "/\">");
         out.print(tt.getTrackerName() + "</a></td><td>");
         out.print(tt.getHost() + "</td><td>" + numCurTasks +
+                  "</td><td>" + tt.getMaxTasks() + 
                   "</td><td>" + numFailures + 
                   "</td><td>" + sinceHeartbeat + "</td></tr>\n");
       }



Mime
View raw message