hadoop-mapreduce-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1411007 [1/2] - in /hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project: ./ conf/ dev-support/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-clien...
Date: Sun, 18 Nov 2012 22:31:46 GMT
Author: szetszwo
Date: Sun Nov 18 22:31:28 2012
New Revision: 1411007

URL: http://svn.apache.org/viewvc?rev=1411007&view=rev
Log:
Merge r1408927 through r1410997 from trunk

Added:
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/
      - copied from r1410997, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
      - copied unchanged from r1410997, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
Modified:
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptStatusUpdateEvent.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppView.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1408927-1410997

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt Sun Nov 18 22:31:28 2012
@@ -166,6 +166,8 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8911. CRLF characters in source and text files.
     (Raja Aluri via suresh)
 
+    MAPREDUCE-4723. Fix warnings found by findbugs 2. (Sandy Ryza via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -197,6 +199,9 @@ Release 2.0.3-alpha - Unreleased 
     MAPREDUCE-4777. In TestIFile, testIFileReaderWithCodec relies on
     testIFileWriterWithCodec. (Sandy Ryza via tomwhite)
 
+    MAPREDUCE-4800. Cleanup o.a.h.mapred.MapTaskStatus - remove unused 
+    code. (kkambatl via tucu)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES
@@ -573,6 +578,21 @@ Release 2.0.0-alpha - 05-23-2012
     MAPREDUCE-4444. nodemanager fails to start when one of the local-dirs is
     bad (Jason Lowe via bobby)
 
+Release 0.23.6 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    MAPREDUCE-4802. Takes a long time to load the task list on the AM for
+    large jobs (Ravi Prakash via bobby)
+
 Release 0.23.5 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -593,6 +613,9 @@ Release 0.23.5 - UNRELEASED
 
   OPTIMIZATIONS
 
+    MAPREDUCE-4720. Browser thinks History Server main page JS is taking too 
+    long (Ravi Prakash via bobby)
+
   BUG FIXES
 
     MAPREDUCE-4554. Job Credentials are not transmitted if security is turned 
@@ -660,8 +683,17 @@ Release 0.23.5 - UNRELEASED
 
     MAPREDUCE-4786. Job End Notification retry interval is 5 milliseconds by
     default (Ravi Prakash via bobby)
+
+    MAPREDUCE-4517. Too many INFO messages written out during AM to RM heartbeat
+    (Jason Lowe via tgraves)
+
+    MAPREDUCE-4797. LocalContainerAllocator can loop forever trying to contact
+    the RM (jlowe via bobby)
+
+    MAPREDUCE-4801. ShuffleHandler can generate large logs due to prematurely
+    closed channels (jlowe via bobby)
  
-Release 0.23.4 - UNRELEASED
+Release 0.23.4
 
   INCOMPATIBLE CHANGES
 

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1408927-1410997

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1408927-1410997

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml Sun Nov 18 22:31:28 2012
@@ -479,4 +479,28 @@
       <Field name="sslFileBufferSize" />
      <Bug pattern="IS2_INCONSISTENT_SYNC" />
    </Match> 
+   
+  <Match>
+    <Class name="org.apache.hadoop.mapreduce.util.ProcessTree" />
+     <Method name="sendSignal" />  
+    <Bug pattern="NP_GUARANTEED_DEREF_ON_EXCEPTION_PATH" />
+  </Match>
+   
+  <Match>
+    <Class name="org.apache.hadoop.mapreduce.util.ProcessTree" />
+     <Method name="isSetsidSupported" />  
+    <Bug pattern="NP_GUARANTEED_DEREF_ON_EXCEPTION_PATH" />
+  </Match>
+
+  <Match>
+    <Class name="org.apache.hadoop.mapreduce.util.ProcessTree" />
+     <Method name="isSetsidSupported" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_EXCEPTION" />
+  </Match>
+   
+  <Match>
+    <Class name="org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1" />
+    <Bug pattern="SE_BAD_FIELD_INNER_CLASS" />
+  </Match>
+   
  </FindBugsFilter>

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java Sun Nov 18 22:31:28 2012
@@ -359,9 +359,8 @@ public class LocalContainerLauncher exte
               + StringUtils.stringifyException(e));
         }
         // Report back any failures, for diagnostic purposes
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        exception.printStackTrace(new PrintStream(baos));
-        umbilical.reportDiagnosticInfo(classicAttemptID, baos.toString());
+        umbilical.reportDiagnosticInfo(classicAttemptID, 
+            StringUtils.stringifyException(exception));
         throw new RuntimeException();
 
       } catch (Throwable throwable) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java Sun Nov 18 22:31:28 2012
@@ -315,8 +315,6 @@ public class TaskAttemptListenerImpl ext
         + taskStatus.getProgress());
     // Task sends the updated state-string to the TT.
     taskAttemptStatus.stateString = taskStatus.getStateString();
-    // Set the output-size when map-task finishes. Set by the task itself.
-    taskAttemptStatus.outputSize = taskStatus.getOutputSize();
     // Task sends the updated phase to the TT.
     taskAttemptStatus.phase = TypeConverter.toYarn(taskStatus.getPhase());
     // Counters are updated by the task. Convert counters into new format as

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java Sun Nov 18 22:31:28 2012
@@ -184,10 +184,8 @@ class YarnChild {
         LOG.info("Exception cleaning up: " + StringUtils.stringifyException(e));
       }
       // Report back any failures, for diagnostic purposes
-      ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      exception.printStackTrace(new PrintStream(baos));
       if (taskid != null) {
-        umbilical.fatalError(taskid, baos.toString());
+        umbilical.fatalError(taskid, StringUtils.stringifyException(exception));
       }
     } catch (Throwable throwable) {
       LOG.fatal("Error running child : "

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java Sun Nov 18 22:31:28 2012
@@ -600,6 +600,8 @@ public class JobHistoryEventHandler exte
       summary.setJobFinishTime(juce.getFinishTime());
       setSummarySlotSeconds(summary, context.getJob(jobId).getAllCounters());
       break;
+    default:
+      throw new YarnException("Invalid event type");
     }
   }
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptStatusUpdateEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptStatusUpdateEvent.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptStatusUpdateEvent.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptStatusUpdateEvent.java Sun Nov 18 22:31:28 2012
@@ -49,7 +49,6 @@ public class TaskAttemptStatusUpdateEven
     public Counters counters;
     public String stateString;
     public Phase phase;
-    public long outputSize;
     public List<TaskAttemptId> fetchFailedMaps;
     public long mapFinishTime;
     public long shuffleFinishTime;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java Sun Nov 18 22:31:28 2012
@@ -833,6 +833,9 @@ public class JobImpl implements org.apac
         break;
       case SUCCEEDED:
         metrics.completedJob(this);
+        break;
+      default:
+        throw new IllegalArgumentException("Illegal job state: " + finalState);
     }
     return finalState;
   }
@@ -1311,6 +1314,9 @@ public class JobImpl implements org.apac
       case REDUCE:
         this.finalReduceCounters.incrAllCounters(counters);
         break;
+      default:
+        throw new IllegalStateException("Task type neither map nor reduce: " + 
+            t.getType());
       }
       this.fullCounters.incrAllCounters(counters);
     }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Sun Nov 18 22:31:28 2012
@@ -1335,6 +1335,8 @@ public abstract class TaskAttemptImpl im
               taskAttempt.attemptId,
               TaskEventType.T_ATTEMPT_KILLED));
           break;
+        default:
+          LOG.error("Task final state is not FAILED or KILLED: " + finalState);
       }
       if (taskAttempt.getLaunchTime() != 0) {
         TaskAttemptUnsuccessfulCompletionEvent tauce =

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java Sun Nov 18 22:31:28 2012
@@ -19,7 +19,6 @@
 package org.apache.hadoop.mapreduce.v2.app.local;
 
 import java.util.ArrayList;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -62,7 +61,6 @@ public class LocalContainerAllocator ext
 
   @SuppressWarnings("rawtypes")
   private final EventHandler eventHandler;
-  private AtomicInteger containerCount = new AtomicInteger();
   private long retryInterval;
   private long retrystartTime;
   private String nmHost;
@@ -102,9 +100,9 @@ public class LocalContainerAllocator ext
         this.applicationAttemptId, this.lastResponseID, super
             .getApplicationProgress(), new ArrayList<ResourceRequest>(),
         new ArrayList<ContainerId>());
-    AllocateResponse allocateResponse = scheduler.allocate(allocateRequest);
     AMResponse response;
     try {
+      AllocateResponse allocateResponse = scheduler.allocate(allocateRequest);
       response = allocateResponse.getAMResponse();
       // Reset retry count if no exception occurred.
       retrystartTime = System.currentTimeMillis();

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java Sun Nov 18 22:31:28 2012
@@ -67,6 +67,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.util.RackResolver;
 
@@ -145,6 +146,8 @@ public class RMContainerAllocator extend
   BlockingQueue<ContainerAllocatorEvent> eventQueue
     = new LinkedBlockingQueue<ContainerAllocatorEvent>();
 
+  private ScheduleStats scheduleStats = new ScheduleStats();
+
   public RMContainerAllocator(ClientService clientService, AppContext context) {
     super(clientService, context);
     this.stopped = new AtomicBoolean(false);
@@ -208,13 +211,10 @@ public class RMContainerAllocator extend
 
   @Override
   protected synchronized void heartbeat() throws Exception {
-    LOG.info("Before Scheduling: " + getStat());
+    scheduleStats.updateAndLogIfChanged("Before Scheduling: ");
     List<Container> allocatedContainers = getResources();
-    LOG.info("After Scheduling: " + getStat());
     if (allocatedContainers.size() > 0) {
-      LOG.info("Before Assign: " + getStat());
       scheduledRequests.assign(allocatedContainers);
-      LOG.info("After Assign: " + getStat());
     }
 
     int completedMaps = getJob().getCompletedMaps();
@@ -235,6 +235,8 @@ public class RMContainerAllocator extend
           maxReduceRampupLimit, reduceSlowStart);
       recalculateReduceSchedule = false;
     }
+
+    scheduleStats.updateAndLogIfChanged("After Scheduling: ");
   }
 
   @Override
@@ -245,7 +247,7 @@ public class RMContainerAllocator extend
     }
     eventHandlingThread.interrupt();
     super.stop();
-    LOG.info("Final Stats: " + getStat());
+    scheduleStats.log("Final Stats: ");
   }
 
   public boolean getIsReduceStarted() {
@@ -427,7 +429,9 @@ public class RMContainerAllocator extend
       return;
     }
     
-    LOG.info("Recalculating schedule...");
+    int headRoom = getAvailableResources() != null ?
+        getAvailableResources().getMemory() : 0;
+    LOG.info("Recalculating schedule, headroom=" + headRoom);
     
     //check for slow start
     if (!getIsReduceStarted()) {//not set yet
@@ -536,24 +540,6 @@ public class RMContainerAllocator extend
     }
   }
   
-  /**
-   * Synchronized to avoid findbugs warnings
-   */
-  private synchronized String getStat() {
-    return "PendingReduces:" + pendingReduces.size() +
-        " ScheduledMaps:" + scheduledRequests.maps.size() +
-        " ScheduledReduces:" + scheduledRequests.reduces.size() +
-        " AssignedMaps:" + assignedRequests.maps.size() + 
-        " AssignedReduces:" + assignedRequests.reduces.size() +
-        " completedMaps:" + getJob().getCompletedMaps() + 
-        " completedReduces:" + getJob().getCompletedReduces() +
-        " containersAllocated:" + containersAllocated +
-        " containersReleased:" + containersReleased +
-        " hostLocalAssigned:" + hostLocalAssigned + 
-        " rackLocalAssigned:" + rackLocalAssigned +
-        " availableResources(headroom):" + getAvailableResources();
-  }
-
   @SuppressWarnings("unchecked")
   private List<Container> getResources() throws Exception {
     int headRoom = getAvailableResources() != null ? getAvailableResources().getMemory() : 0;//first time it would be null
@@ -595,6 +581,9 @@ public class RMContainerAllocator extend
     if (newContainers.size() + finishedContainers.size() > 0 || headRoom != newHeadRoom) {
       //something changed
       recalculateReduceSchedule = true;
+      if (LOG.isDebugEnabled() && headRoom != newHeadRoom) {
+        LOG.debug("headroom=" + newHeadRoom);
+      }
     }
 
     if (LOG.isDebugEnabled()) {
@@ -1123,4 +1112,60 @@ public class RMContainerAllocator extend
       }
     }
   }
+
+  private class ScheduleStats {
+    int numPendingReduces;
+    int numScheduledMaps;
+    int numScheduledReduces;
+    int numAssignedMaps;
+    int numAssignedReduces;
+    int numCompletedMaps;
+    int numCompletedReduces;
+    int numContainersAllocated;
+    int numContainersReleased;
+
+    public void updateAndLogIfChanged(String msgPrefix) {
+      boolean changed = false;
+
+      // synchronized to fix findbug warnings
+      synchronized (RMContainerAllocator.this) {
+        changed |= (numPendingReduces != pendingReduces.size());
+        numPendingReduces = pendingReduces.size();
+        changed |= (numScheduledMaps != scheduledRequests.maps.size());
+        numScheduledMaps = scheduledRequests.maps.size();
+        changed |= (numScheduledReduces != scheduledRequests.reduces.size());
+        numScheduledReduces = scheduledRequests.reduces.size();
+        changed |= (numAssignedMaps != assignedRequests.maps.size());
+        numAssignedMaps = assignedRequests.maps.size();
+        changed |= (numAssignedReduces != assignedRequests.reduces.size());
+        numAssignedReduces = assignedRequests.reduces.size();
+        changed |= (numCompletedMaps != getJob().getCompletedMaps());
+        numCompletedMaps = getJob().getCompletedMaps();
+        changed |= (numCompletedReduces != getJob().getCompletedReduces());
+        numCompletedReduces = getJob().getCompletedReduces();
+        changed |= (numContainersAllocated != containersAllocated);
+        numContainersAllocated = containersAllocated;
+        changed |= (numContainersReleased != containersReleased);
+        numContainersReleased = containersReleased;
+      }
+
+      if (changed) {
+        log(msgPrefix);
+      }
+    }
+
+    public void log(String msgPrefix) {
+        LOG.info(msgPrefix + "PendingReds:" + numPendingReduces +
+        " ScheduledMaps:" + numScheduledMaps +
+        " ScheduledReds:" + numScheduledReduces +
+        " AssignedMaps:" + numAssignedMaps +
+        " AssignedReds:" + numAssignedReduces +
+        " CompletedMaps:" + numCompletedMaps +
+        " CompletedReds:" + numCompletedReduces +
+        " ContAlloc:" + numContainersAllocated +
+        " ContRel:" + numContainersReleased +
+        " HostLocal:" + hostLocalAssigned +
+        " RackLocal:" + rackLocalAssigned);
+    }
+  }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java Sun Nov 18 22:31:28 2012
@@ -210,7 +210,7 @@ public abstract class RMContainerRequest
       return; //already blacklisted
     }
     Integer failures = nodeFailures.remove(hostName);
-    failures = failures == null ? 0 : failures;
+    failures = failures == null ? Integer.valueOf(0) : failures;
     failures++;
     LOG.info(failures + " failures on node " + hostName);
     if (failures >= maxTaskFailuresPerNode) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java Sun Nov 18 22:31:28 2012
@@ -43,7 +43,6 @@ abstract class StartEndTimesBase impleme
   static final int MINIMUM_COMPLETE_NUMBER_TO_SPECULATE
       = 1;
 
-  protected Configuration conf = null;
   protected AppContext context = null;
 
   protected final Map<TaskAttemptId, Long> startTimes
@@ -82,7 +81,6 @@ abstract class StartEndTimesBase impleme
 
   @Override
   public void contextualize(Configuration conf, AppContext context) {
-    this.conf = conf;
     this.context = context;
 
     Map<JobId, Job> allJobs = context.getAllJobs();

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppView.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppView.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppView.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppView.java Sun Nov 18 22:31:28 2012
@@ -35,7 +35,6 @@ public class AppView extends TwoColumnLa
   protected void commonPreHead(Page.HTML<_> html) {
     set(ACCORDION_ID, "nav");
     set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
-    set(THEMESWITCHER_ID, "themeswitcher");
   }
 
   @Override

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java Sun Nov 18 22:31:28 2012
@@ -83,7 +83,6 @@ public class NavBlock extends HtmlBlock 
         li().a("/conf", "Configuration")._().
         li().a("/logs", "Local logs")._().
         li().a("/stacks", "Server stacks")._().
-        li().a("/metrics", "Server metrics")._()._()._().
-    div("#themeswitcher")._();
+        li().a("/metrics", "Server metrics")._()._()._();
   }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java Sun Nov 18 22:31:28 2012
@@ -21,15 +21,13 @@ package org.apache.hadoop.mapreduce.v2.a
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.util.StringHelper.percent;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
 
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -66,6 +64,8 @@ public class TasksBlock extends HtmlBloc
             th("Finish Time").
             th("Elapsed Time")._()._().
         tbody();
+    StringBuilder tasksTableData = new StringBuilder("[\n");
+
     for (Task task : app.getJob().getTasks().values()) {
       if (type != null && task.getType() != type) {
         continue;
@@ -73,31 +73,28 @@ public class TasksBlock extends HtmlBloc
       TaskInfo info = new TaskInfo(task);
       String tid = info.getId();
       String pct = percent(info.getProgress() / 100);
-      long startTime = info.getStartTime();
-      long finishTime = info.getFinishTime();
-      long elapsed = info.getElapsedTime();
-      tbody.
-        tr().
-          td().
-            br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
-            a(url("task", tid), tid)._().
-          td().
-            br().$title(pct)._().
-            div(_PROGRESSBAR).
-              $title(join(pct, '%')). // tooltip
-              div(_PROGRESSBAR_VALUE).
-                $style(join("width:", pct, '%'))._()._()._().
-          td(info.getState()).
-          td().
-            br().$title(String.valueOf(startTime))._().
-            _(Times.format(startTime))._().
-          td().
-            br().$title(String.valueOf(finishTime))._().
-            _(Times.format(finishTime))._().
-          td().
-            br().$title(String.valueOf(elapsed))._().
-            _(StringUtils.formatTime(elapsed))._()._();
+      tasksTableData.append("[\"<a href='").append(url("task", tid))
+      .append("'>").append(tid).append("</a>\",\"")
+      //Progress bar
+      .append("<br title='").append(pct)
+      .append("'> <div class='").append(C_PROGRESSBAR).append("' title='")
+      .append(join(pct, '%')).append("'> ").append("<div class='")
+      .append(C_PROGRESSBAR_VALUE).append("' style='")
+      .append(join("width:", pct, '%')).append("'> </div> </div>\",\"")
+
+      .append(info.getState()).append("\",\"")
+      .append(info.getStartTime()).append("\",\"")
+      .append(info.getFinishTime()).append("\",\"")
+      .append(info.getElapsedTime()).append("\"],\n");
     }
+    //Remove the last comma and close off the array of arrays
+    if(tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
+      tasksTableData.delete(tasksTableData.length()-2, tasksTableData.length()-1);
+    }
+    tasksTableData.append("]");
+    html.script().$type("text/javascript").
+    _("var tasksTableData=" + tasksTableData)._();
+
     tbody._()._();
   }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java Sun Nov 18 22:31:28 2012
@@ -37,11 +37,26 @@ public class TasksPage extends AppView {
   }
 
   private String tasksTableInit() {
-    return tableInit().
-        // Sort by id upon page load
-        append(", aaSorting: [[0, 'asc']]").
-        append(",aoColumns:[{sType:'title-numeric'},{sType:'title-numeric',").
-        append("bSearchable:false},null,{sType:'title-numeric'},").
-        append("{sType:'title-numeric'},{sType:'title-numeric'}]}").toString();
+    return tableInit()
+      .append(", 'aaData': tasksTableData")
+      .append(", bDeferRender: true")
+      .append(", bProcessing: true")
+
+      .append("\n, aoColumnDefs: [\n")
+      .append("{'sType':'numeric', 'aTargets': [0]")
+      .append(", 'mRender': parseHadoopID }")
+
+      .append("\n, {'sType':'numeric', bSearchable:false, 'aTargets': [1]")
+      .append(", 'mRender': parseHadoopProgress }")
+
+
+      .append("\n, {'sType':'numeric', 'aTargets': [3, 4]")
+      .append(", 'mRender': renderHadoopDate }")
+
+      .append("\n, {'sType':'numeric', 'aTargets': [5]")
+      .append(", 'mRender': renderHadoopElapsedTime }]")
+
+      // Sort by id upon page load
+      .append(", aaSorting: [[0, 'asc']] }").toString();
   }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java Sun Nov 18 22:31:28 2012
@@ -285,6 +285,8 @@ public class JobInfo {
         case SCHEDULED:
           ++this.mapsPending;
           break;
+        default:
+          break;
         }
         break;
       case REDUCE:
@@ -296,8 +298,13 @@ public class JobInfo {
         case SCHEDULED:
           ++this.reducesPending;
           break;
+        default:
+          break;
         }
         break;
+      default:
+        throw new IllegalStateException(
+            "Task type is neither map nor reduce: " + task.getType());
       }
       // Attempts counts
       Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
@@ -337,6 +344,9 @@ public class JobInfo {
           this.failedReduceAttempts += failed;
           this.killedReduceAttempts += killed;
           break;
+        default:
+          throw new IllegalStateException("Task type neither map nor reduce: " + 
+              task.getType());
         }
       }
     }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java Sun Nov 18 22:31:28 2012
@@ -412,7 +412,6 @@ public class TestFetchFailure {
     status.fetchFailedMaps = new ArrayList<TaskAttemptId>();
     status.id = attempt.getID();
     status.mapFinishTime = 0;
-    status.outputSize = 0;
     status.phase = phase;
     status.progress = 0.5f;
     status.shuffleFinishTime = 0;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java Sun Nov 18 22:31:28 2012
@@ -86,7 +86,6 @@ public class TestMRClientService {
     taskAttemptStatus.stateString = "RUNNING";
     taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
     taskAttemptStatus.phase = Phase.MAP;
-    taskAttemptStatus.outputSize = 3;
     // send the status update
     app.getContext().getEventHandler().handle(
         new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Sun Nov 18 22:31:28 2012
@@ -59,6 +59,8 @@ import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 
+import com.google.common.base.Charsets;
+
 /**
  * Helper class for MR applications
  */
@@ -159,7 +161,8 @@ public class MRApps extends Apps {
       }
 
       if (classpathFileStream != null) {
-        reader = new BufferedReader(new InputStreamReader(classpathFileStream));
+        reader = new BufferedReader(new InputStreamReader(classpathFileStream, 
+            Charsets.UTF_8));
         String cp = reader.readLine();
         if (cp != null) {
           Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java Sun Nov 18 22:31:28 2012
@@ -420,6 +420,8 @@ public abstract class FileInputFormat<K,
           }
           break;
         }
+        default:
+          continue; // nothing special to do for this character
       }
     }
     pathStrings.add(commaSeparatedPaths.substring(pathStart, length));

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java Sun Nov 18 22:31:28 2012
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
 import java.util.List;
@@ -30,6 +31,8 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 /**
  * <code>JobQueueClient</code> is interface provided to the user in order to get
  * JobQueue related information from the {@link JobTracker}
@@ -144,7 +147,8 @@ class JobQueueClient extends Configured 
   private void displayQueueList() throws IOException {
     JobQueueInfo[] rootQueues = jc.getRootQueues();
     for (JobQueueInfo queue : rootQueues) {
-      printJobQueueInfo(queue, new PrintWriter(System.out));
+      printJobQueueInfo(queue, new PrintWriter(new OutputStreamWriter(
+          System.out, Charsets.UTF_8)));
     }
   }
   
@@ -182,7 +186,8 @@ class JobQueueClient extends Configured 
       System.out.println("Queue \"" + queue + "\" does not exist.");
       return;
     }
-    printJobQueueInfo(jobQueueInfo, new PrintWriter(System.out));
+    printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(
+        System.out, Charsets.UTF_8)));
     if (showJobs && (jobQueueInfo.getChildren() == null ||
         jobQueueInfo.getChildren().size() == 0)) {
       JobStatus[] jobs = jobQueueInfo.getJobStatuses();
@@ -223,10 +228,10 @@ class JobQueueClient extends Configured 
     if ("-queueinfo".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + "<job-queue-name> [-showJobs]]");
     } else {
-      System.err.printf(prefix + "<command> <args>\n");
-      System.err.printf("\t[-list]\n");
-      System.err.printf("\t[-info <job-queue-name> [-showJobs]]\n");
-      System.err.printf("\t[-showacls] \n\n");
+      System.err.printf(prefix + "<command> <args>%n");
+      System.err.printf("\t[-list]%n");
+      System.err.printf("\t[-info <job-queue-name> [-showJobs]]%n");
+      System.err.printf("\t[-showacls] %n%n");
       ToolRunner.printGenericCommandUsage(System.out);
     }
   }

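As a side note, the two changes above combine into a small pattern worth isolating: %n is the platform-independent line separator for printf, and wrapping System.out in an OutputStreamWriter fixes the output encoding. The demo class below is illustrative only and not part of this commit:

    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;

    import com.google.common.base.Charsets;

    public class UsageDemo {
      public static void main(String[] args) {
        // %n expands to the platform line separator; a literal \n does not.
        System.err.printf("usage: demo <command> <args>%n");
        System.err.printf("\t[-list]%n");

        // Encode console output as UTF-8 regardless of the default charset.
        PrintWriter out = new PrintWriter(
            new OutputStreamWriter(System.out, Charsets.UTF_8), true);
        out.println("queue: default");
        out.flush();
      }
    }
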
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTaskStatus.java Sun Nov 18 22:31:28 2012
@@ -25,8 +25,7 @@ import java.io.IOException;
 
 class MapTaskStatus extends TaskStatus {
 
-  private long mapFinishTime;
-  private long sortFinishTime;
+  private long mapFinishTime = 0;
   
   public MapTaskStatus() {}
 
@@ -49,10 +48,10 @@ class MapTaskStatus extends TaskStatus {
   @Override
   void setFinishTime(long finishTime) {
     super.setFinishTime(finishTime);
-    if (mapFinishTime == 0) {
-      mapFinishTime = finishTime;
+    // set mapFinishTime if it hasn't been set before
+    if (getMapFinishTime() == 0) {
+      setMapFinishTime(finishTime);
     }
-    setSortFinishTime(finishTime);
   }
   
   @Override
@@ -74,16 +73,6 @@ class MapTaskStatus extends TaskStatus {
   void setMapFinishTime(long mapFinishTime) {
     this.mapFinishTime = mapFinishTime;
   }
-
-  @Override
-  public long getSortFinishTime() {
-    return sortFinishTime;
-  }
-
-  @Override
-  void setSortFinishTime(long sortFinishTime) {
-    this.sortFinishTime = sortFinishTime;
-  }
   
   @Override
   synchronized void statusUpdate(TaskStatus status) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java Sun Nov 18 22:31:28 2012
@@ -49,6 +49,8 @@ import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
+import com.google.common.base.Charsets;
+
 /**
  * A simple logger to handle the task-specific user logs.
  * This class uses the system property <code>hadoop.log.dir</code>.
@@ -104,7 +106,8 @@ public class TaskLog {
   throws IOException {
     File indexFile = getIndexFile(taskid, isCleanup);
     BufferedReader fis = new BufferedReader(new InputStreamReader(
-      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null)));
+      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
+      Charsets.UTF_8));
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
     //stdout:<start-offset in the stdout file> <length>

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java Sun Nov 18 22:31:28 2012
@@ -27,6 +27,8 @@ import org.apache.hadoop.io.LongWritable
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.*;
 
+import com.google.common.base.Charsets;
+
 /** 
  * An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
@@ -59,7 +61,9 @@ public class TextInputFormat extends Fil
     reporter.setStatus(genericSplit.toString());
     String delimiter = job.get("textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
-    if (null != delimiter) recordDelimiterBytes = delimiter.getBytes();
+    if (null != delimiter) {
+      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+    }
     return new LineRecordReader(job, (FileSplit) genericSplit,
         recordDelimiterBytes);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java Sun Nov 18 22:31:28 2012
@@ -49,9 +49,7 @@ public class CombineFileRecordReader<K, 
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;
   
   protected int idx;
   protected long progress;
@@ -106,7 +104,6 @@ public class CombineFileRecordReader<K, 
     throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Sun Nov 18 22:31:28 2012
@@ -56,6 +56,8 @@ import org.codehaus.jackson.JsonParseExc
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 
+import com.google.common.base.Charsets;
+
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 class JobSubmitter {
@@ -550,7 +552,7 @@ class JobSubmitter {
 
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
-              .getBytes());
+              .getBytes(Charsets.UTF_8));
         }
       } catch (JsonMappingException e) {
         json_error = true;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java Sun Nov 18 22:31:28 2012
@@ -188,7 +188,7 @@ public class HistoryViewer {
              decimal.format(counter.getValue());
 
            buff.append(
-               String.format("\n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", 
+               String.format("%n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", 
                    totalGroup.getDisplayName(),
                    counter.getDisplayName(),
                    mapValue, reduceValue, totalValue));

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java Sun Nov 18 22:31:28 2012
@@ -30,6 +30,8 @@ import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -58,6 +60,8 @@ import org.apache.hadoop.conf.Configurat
 public class DBInputFormat<T extends DBWritable>
     extends InputFormat<LongWritable, T> implements Configurable {
 
+  private static final Log LOG = LogFactory.getLog(DBInputFormat.class);
+  
   private String dbProductName = "DEFAULT";
 
   /**
@@ -354,6 +358,8 @@ public class DBInputFormat<T extends DBW
         this.connection.close();
         this.connection = null;
       }
-    } catch (SQLException sqlE) { } // ignore exception on close.
+    } catch (SQLException sqlE) {
+      LOG.debug("Exception on close", sqlE);
+    }
   }
 }

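For illustration, a standalone sketch (names are made up, not from the commit) of the close-and-log pattern the DBInputFormat hunk switches to, recording the otherwise-swallowed SQLException at debug level:

    import java.sql.Connection;
    import java.sql.SQLException;

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class QuietCloser {
      private static final Log LOG = LogFactory.getLog(QuietCloser.class);

      // Failures while closing are not fatal to the caller, but they are
      // logged at debug level instead of being silently dropped.
      public static void closeQuietly(Connection connection) {
        if (connection == null) {
          return;
        }
        try {
          connection.close();
        } catch (SQLException sqlE) {
          LOG.debug("Exception on close", sqlE);
        }
      }
    }
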
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java Sun Nov 18 22:31:28 2012
@@ -219,7 +219,6 @@ public abstract class CombineFileInputFo
       Path p = fs.makeQualified(paths[i]);
       newpaths.add(p);
     }
-    paths = null;
 
     // In one single iteration, process all the paths in a single pool.
     // Processing one pool at a time ensures that a split contains paths

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java Sun Nov 18 22:31:28 2012
@@ -46,9 +46,7 @@ public class CombineFileRecordReader<K, 
                                           Integer.class};
 
   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;
   
   protected int idx;
@@ -111,7 +109,6 @@ public class CombineFileRecordReader<K, 
     throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java Sun Nov 18 22:31:28 2012
@@ -425,6 +425,8 @@ public abstract class FileInputFormat<K,
           }
           break;
         }
+        default:
+          continue; // nothing special to do for this character
       }
     }
     pathStrings.add(commaSeparatedPaths.substring(pathStart, length));

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java Sun Nov 18 22:31:28 2012
@@ -32,6 +32,8 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
+import com.google.common.base.Charsets;
+
 /** An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
  * the position in the file, and values are the line of text.. */
@@ -47,7 +49,7 @@ public class TextInputFormat extends Fil
         "textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
     if (null != delimiter)
-      recordDelimiterBytes = delimiter.getBytes();
+      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
     return new LineRecordReader(recordDelimiterBytes);
   }
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java Sun Nov 18 22:31:28 2012
@@ -18,21 +18,23 @@
 package org.apache.hadoop.mapreduce.security;
 
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.net.URL;
 
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.record.Utils;
 
+import com.google.common.base.Charsets;
+
 /**
  * 
  * utilities for generating kyes, hashes and verifying them for shuffle
@@ -41,6 +43,8 @@ import org.apache.hadoop.record.Utils;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class SecureShuffleUtils {
+  private static final Log LOG = LogFactory.getLog(SecureShuffleUtils.class);
+  
   public static final String HTTP_HEADER_URL_HASH = "UrlHash";
   public static final String HTTP_HEADER_REPLY_URL_HASH = "ReplyHash";
   
@@ -49,7 +53,8 @@ public class SecureShuffleUtils {
    * @param msg
    */
   public static String generateHash(byte[] msg, SecretKey key) {
-    return new String(Base64.encodeBase64(generateByteHash(msg, key)));
+    return new String(Base64.encodeBase64(generateByteHash(msg, key)), 
+        Charsets.UTF_8);
   }
   
   /**
@@ -80,7 +85,7 @@ public class SecureShuffleUtils {
    */
   public static String hashFromString(String enc_str, SecretKey key) 
   throws IOException {
-    return generateHash(enc_str.getBytes(), key); 
+    return generateHash(enc_str.getBytes(Charsets.UTF_8), key); 
   }
   
   /**
@@ -91,9 +96,9 @@ public class SecureShuffleUtils {
    */
   public static void verifyReply(String base64Hash, String msg, SecretKey key)
   throws IOException {
-    byte[] hash = Base64.decodeBase64(base64Hash.getBytes());
+    byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
     
-    boolean res = verifyHash(hash, msg.getBytes(), key);
+    boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
     
     if(res != true) {
       throw new IOException("Verification of the hashReply failed");
@@ -126,19 +131,4 @@ public class SecureShuffleUtils {
   private static String buildMsgFrom(String uri_path, String uri_query, int port) {
     return String.valueOf(port) + uri_path + "?" + uri_query;
   }
-  
-  
-  /**
-   * byte array to Hex String
-   * @param ba
-   * @return string with HEX value of the key
-   */
-  public static String toHex(byte[] ba) {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for(byte b: ba) {
-      ps.printf("%x", b);
-    }
-    return baos.toString();
-  }
 }

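To make the charset-explicit hashing pattern concrete, here is an independent sketch, not the SecureShuffleUtils implementation, of Base64-encoding an HMAC computed over the UTF-8 bytes of a message; the HmacSHA1 algorithm choice is an assumption made only for this example:

    import javax.crypto.Mac;
    import javax.crypto.SecretKey;
    import javax.crypto.spec.SecretKeySpec;

    import org.apache.commons.codec.binary.Base64;

    import com.google.common.base.Charsets;

    public class HashDemo {
      // HMAC over the UTF-8 bytes of msg, returned as a Base64 string that
      // is itself decoded with an explicit charset.
      public static String hash(String msg, byte[] keyBytes) throws Exception {
        SecretKey key = new SecretKeySpec(keyBytes, "HmacSHA1");
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(key);
        byte[] digest = mac.doFinal(msg.getBytes(Charsets.UTF_8));
        return new String(Base64.encodeBase64(digest), Charsets.UTF_8);
      }
    }
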
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java Sun Nov 18 22:31:28 2012
@@ -144,7 +144,6 @@ public class Shuffle<K, V> implements Ex
     for (Fetcher<K,V> fetcher : fetchers) {
       fetcher.shutDown();
     }
-    fetchers = null;
     
     // stop the scheduler
     scheduler.close();

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Sun Nov 18 22:31:28 2012
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapreduce.tools;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.List;
@@ -53,6 +54,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.logaggregation.LogDumper;
 
+import com.google.common.base.Charsets;
+
 /**
  * Interprets the map reduce cli options 
  */
@@ -426,25 +429,25 @@ public class CLI extends Configured impl
           " <job-id> <task-attempt-id>]. " +
           " <task-attempt-id> is optional to get task attempt logs.");      
     } else {
-      System.err.printf(prefix + "<command> <args>\n");
-      System.err.printf("\t[-submit <job-file>]\n");
-      System.err.printf("\t[-status <job-id>]\n");
-      System.err.printf("\t[-counter <job-id> <group-name> <counter-name>]\n");
-      System.err.printf("\t[-kill <job-id>]\n");
+      System.err.printf(prefix + "<command> <args>%n");
+      System.err.printf("\t[-submit <job-file>]%n");
+      System.err.printf("\t[-status <job-id>]%n");
+      System.err.printf("\t[-counter <job-id> <group-name> <counter-name>]%n");
+      System.err.printf("\t[-kill <job-id>]%n");
       System.err.printf("\t[-set-priority <job-id> <priority>]. " +
-        "Valid values for priorities are: " + jobPriorityValues + "\n");
-      System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]\n");
-      System.err.printf("\t[-history <jobHistoryFile>]\n");
-      System.err.printf("\t[-list [all]]\n");
-      System.err.printf("\t[-list-active-trackers]\n");
-      System.err.printf("\t[-list-blacklisted-trackers]\n");
+        "Valid values for priorities are: " + jobPriorityValues + "%n");
+      System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]%n");
+      System.err.printf("\t[-history <jobHistoryFile>]%n");
+      System.err.printf("\t[-list [all]]%n");
+      System.err.printf("\t[-list-active-trackers]%n");
+      System.err.printf("\t[-list-blacklisted-trackers]%n");
       System.err.println("\t[-list-attempt-ids <job-id> <task-type> " +
         "<task-state>]. " +
         "Valid values for <task-type> are " + taskTypes + ". " +
         "Valid values for <task-state> are " + taskStates);
-      System.err.printf("\t[-kill-task <task-attempt-id>]\n");
-      System.err.printf("\t[-fail-task <task-attempt-id>]\n");
-      System.err.printf("\t[-logs <job-id> <task-attempt-id>]\n\n");
+      System.err.printf("\t[-kill-task <task-attempt-id>]%n");
+      System.err.printf("\t[-fail-task <task-attempt-id>]%n");
+      System.err.printf("\t[-logs <job-id> <task-attempt-id>]%n%n");
       ToolRunner.printGenericCommandUsage(System.out);
     }
   }
@@ -584,7 +587,8 @@ public class CLI extends Configured impl
 
   public void displayJobList(JobStatus[] jobs) 
       throws IOException, InterruptedException {
-    displayJobList(jobs, new PrintWriter(System.out));
+    displayJobList(jobs, new PrintWriter(new OutputStreamWriter(System.out,
+        Charsets.UTF_8)));
   }
 
   @Private

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java Sun Nov 18 22:31:28 2012
@@ -19,9 +19,10 @@
 package org.apache.hadoop.mapreduce.util;
 
 import java.io.BufferedReader;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -30,6 +31,8 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import com.google.common.base.Charsets;
+
 /**
  * Plugin to calculate resource information on Linux systems.
  */
@@ -152,9 +155,10 @@ public class LinuxResourceCalculatorPlug
 
     // Read "/proc/memInfo" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsMemFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsMemFile),
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....
@@ -211,9 +215,10 @@ public class LinuxResourceCalculatorPlug
     }
     // Read "/proc/cpuinfo" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsCpuFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsCpuFile), 
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....
@@ -258,9 +263,10 @@ public class LinuxResourceCalculatorPlug
   private void readProcStatFile() {
     // Read "/proc/stat" file
     BufferedReader in = null;
-    FileReader fReader = null;
+    InputStreamReader fReader = null;
     try {
-      fReader = new FileReader(procfsStatFile);
+      fReader = new InputStreamReader(new FileInputStream(procfsStatFile),
+          Charsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // shouldn't happen....

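The three hunks above repeat one replacement: FileReader has no charset parameter and always uses the platform default, so it is swapped for an InputStreamReader over a FileInputStream with UTF-8 stated explicitly. A minimal illustrative helper (not part of the plugin) looks like this:

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import com.google.common.base.Charsets;

    public class ProcFileReader {
      // Open a procfs-style text file with an explicit UTF-8 decoder.
      public static BufferedReader open(String procfsFile) throws IOException {
        return new BufferedReader(new InputStreamReader(
            new FileInputStream(procfsFile), Charsets.UTF_8));
      }
    }
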
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java Sun Nov 18 22:31:28 2012
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFac
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 


