hadoop-common-commits mailing list archives

From: cutt...@apache.org
Subject: svn commit: r530556 [9/12] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/hbase/src/java/org/apache/hadoop/hbase/ src/contrib/hbase/src/test/org/apache/hadoop/hbase/ src/contrib/streaming/src/java/org/ap...
Date: Thu, 19 Apr 2007 21:34:53 GMT
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java Thu Apr 19 14:34:41 2007
@@ -39,12 +39,12 @@
   public static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.JobHistory");
   private static final String DELIMITER = " ";
   private static final String KEY = "(\\w+)";
-  private static final String VALUE = "[[^\"]?]+" ; // anything but a " in ""
+  private static final String VALUE = "[[^\"]?]+"; // anything but a " in ""
   
   private static final Pattern pattern = Pattern.compile(KEY + "=" + "\"" + VALUE + "\"");
   
   public static final String JOBTRACKER_START_TIME = String.valueOf(System.currentTimeMillis()); 
-  private static final String LOG_DIR = System.getProperty("hadoop.log.dir") + File.separator + "history" ; 
+  private static final String LOG_DIR = System.getProperty("hadoop.log.dir") + File.separator + "history"; 
   public static final String MASTER_INDEX_LOG_FILE = "JobHistory.log"; 
   
   private static PrintWriter masterIndex = null;
@@ -60,7 +60,7 @@
    * It acts as a global namespace for all keys. 
    */
   public static enum Keys { JOBTRACKERID,
-                            START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF,SUBMIT_TIME, LAUNCH_TIME, 
+                            START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME, LAUNCH_TIME, 
                             TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES, FINISHED_MAPS, FINISHED_REDUCES,
                             JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE, ERROR, TASK_ATTEMPT_ID, TASK_STATUS, 
                             COPY_PHASE, SORT_PHASE, REDUCE_PHASE, SHUFFLE_FINISHED, SORT_FINISHED 
@@ -77,29 +77,29 @@
   private static Map<Keys,String> parseBuffer = new HashMap<Keys, String>(); 
 
   // init log files
-  static { init() ; } 
+  static { init(); } 
   
   /**
    * Initialize JobHistory files. 
    *
    */
   private static void init(){
-    if( !disableHistory ){
+    if (!disableHistory){
       try{
         File logDir = new File(LOG_DIR); 
-        if( ! logDir.exists() ){
-          if( ! logDir.mkdirs() ){
+        if (!logDir.exists()){
+          if (!logDir.mkdirs()){
             throw new IOException("Mkdirs failed to create " + logDir.toString());
           }
         }
         masterIndex = 
           new PrintWriter(
-                          new FileOutputStream(new File( LOG_DIR + File.separator + MASTER_INDEX_LOG_FILE), true )) ;
+                          new FileOutputStream(new File(LOG_DIR + File.separator + MASTER_INDEX_LOG_FILE), true));
         // add jobtracker id = tracker start time
         log(masterIndex, RecordTypes.Jobtracker, Keys.START_TIME, JOBTRACKER_START_TIME);  
       }catch(IOException e){
         LOG.error("Failed to initialize JobHistory log file", e); 
-        disableHistory = true ; 
+        disableHistory = true; 
       }
     }
   }
@@ -115,14 +115,14 @@
    */
   public static void parseHistory(File path, Listener l) throws IOException{
     BufferedReader reader = new BufferedReader(new FileReader(path));
-    String line = null ; 
+    String line = null; 
     StringBuffer buf = new StringBuffer(); 
     while ((line = reader.readLine())!= null){
       buf.append(line); 
-      if( ! line.trim().endsWith("\"")){
-        continue ; 
+      if (!line.trim().endsWith("\"")){
+        continue; 
       }
-      parseLine(buf.toString(), l );
+      parseLine(buf.toString(), l);
       buf = new StringBuffer(); 
     }
   }
@@ -135,8 +135,8 @@
   private static void parseLine(String line, Listener l)throws IOException{
     // extract the record type 
     int idx = line.indexOf(' '); 
-    String recType = line.substring(0, idx) ;
-    String data = line.substring(idx+1, line.length()) ;
+    String recType = line.substring(0, idx);
+    String data = line.substring(idx+1, line.length());
     
     Matcher matcher = pattern.matcher(data); 
 
@@ -174,9 +174,9 @@
    */
 
   static void log(PrintWriter out, RecordTypes recordType, Enum[] keys, String[] values){
-    StringBuffer buf = new StringBuffer(recordType.name()) ; 
-    buf.append(DELIMITER) ; 
-    for( int i =0 ; i< keys.length ; i++ ){
+    StringBuffer buf = new StringBuffer(recordType.name()); 
+    buf.append(DELIMITER); 
+    for(int i =0; i< keys.length; i++){
       buf.append(keys[i]);
       buf.append("=\"");
       buf.append(values[i]);
@@ -221,7 +221,7 @@
      */
     public String get(Keys k){
       String s = values.get(k); 
-      return s == null ? "" : s ; 
+      return s == null ? "" : s; 
     }
     /**
      * Convert value from history to int and return. 
@@ -230,7 +230,7 @@
      */
     public int getInt(Keys k){
       String s = values.get(k); 
-      if( null != s ){
+      if (null != s){
         return Integer.parseInt(s);
       }
       return 0; 
@@ -242,7 +242,7 @@
      */
     public long getLong(Keys k){
       String s = values.get(k); 
-      if( null != s ){
+      if (null != s){
         return Long.parseLong(s);
       }
       return 0; 
@@ -307,14 +307,14 @@
     public static void logSubmitted(String jobId, String jobName, String user, 
                                     long submitTime, String jobConf){
       
-      if( ! disableHistory ){
+      if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job, 
                          new Enum[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
-                         new String[]{jobId, jobName, user, String.valueOf(submitTime),jobConf });
+                         new String[]{jobId, jobName, user, String.valueOf(submitTime), jobConf });
         }
         // setup the history log file for this job
-        String logFileName =  JOBTRACKER_START_TIME + "_" + jobId ; 
+        String logFileName =  JOBTRACKER_START_TIME + "_" + jobId; 
         File logFile = new File(LOG_DIR + File.separator + logFileName);
         
         try{
@@ -323,11 +323,11 @@
           // add to writer as well 
           JobHistory.log(writer, RecordTypes.Job, 
                          new Enum[]{Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF }, 
-                         new String[]{jobId, jobName, user, String.valueOf(submitTime) ,jobConf}); 
+                         new String[]{jobId, jobName, user, String.valueOf(submitTime) , jobConf}); 
              
         }catch(IOException e){
           LOG.error("Failed creating job history log file, disabling history", e);
-          disableHistory = true ; 
+          disableHistory = true; 
         }
       }
     }
@@ -339,21 +339,21 @@
      * @param totalReduces total reduces. 
      */
     public static void logStarted(String jobId, long startTime, int totalMaps, int totalReduces){
-      if( ! disableHistory ){
+      if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job, 
                          new Enum[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
                          new String[] {jobId,  String.valueOf(startTime), 
-                                       String.valueOf(totalMaps), String.valueOf(totalReduces) } ) ; 
+                                       String.valueOf(totalMaps), String.valueOf(totalReduces) }); 
         }
         
-        String logFileName =  JOBTRACKER_START_TIME + "_" + jobId ; 
+        String logFileName =  JOBTRACKER_START_TIME + "_" + jobId; 
         PrintWriter writer = (PrintWriter)openJobs.get(logFileName); 
         
-        if( null != writer ){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Job, 
-                         new Enum[] {Keys.JOBID, Keys.LAUNCH_TIME,Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
-                         new String[] {jobId,  String.valueOf(startTime), String.valueOf(totalMaps), String.valueOf(totalReduces)} ) ; 
+                         new Enum[] {Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES },
+                         new String[] {jobId,  String.valueOf(startTime), String.valueOf(totalMaps), String.valueOf(totalReduces)}); 
         }
       }
     }
@@ -368,28 +368,28 @@
      */ 
     public static void logFinished(String jobId, long finishTime, int finishedMaps, int finishedReduces,
                                    int failedMaps, int failedReduces){
-      if( ! disableHistory ){
+      if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job,          
                          new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
                          new String[] {jobId,  "" + finishTime, Values.SUCCESS.name(), 
-                                       String.valueOf(finishedMaps), String.valueOf(finishedReduces) } ) ;
+                                       String.valueOf(finishedMaps), String.valueOf(finishedReduces) });
         }
         
         // close job file for this job
-        String logFileName = JOBTRACKER_START_TIME + "_" + jobId ; 
+        String logFileName = JOBTRACKER_START_TIME + "_" + jobId; 
         PrintWriter writer = openJobs.get(logFileName); 
-        if( null != writer){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Job,          
                          new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES,
                                      Keys.FAILED_MAPS, Keys.FAILED_REDUCES},
                          new String[] {jobId,  "" + finishTime, Values.SUCCESS.name(), 
                                        String.valueOf(finishedMaps), String.valueOf(finishedReduces),
-                                       String.valueOf(failedMaps), String.valueOf(failedReduces)} ) ;
+                                       String.valueOf(failedMaps), String.valueOf(failedReduces)});
           writer.close();
           openJobs.remove(logFileName); 
         }
-        Thread historyCleaner  = new Thread( new HistoryCleaner() );
+        Thread historyCleaner  = new Thread(new HistoryCleaner());
         historyCleaner.start(); 
       }
     }
@@ -400,21 +400,21 @@
      * @param finishedMaps no finished map tasks. 
      * @param finishedReduces no of finished reduce tasks. 
      */
-    public static void logFailed(String jobid, long timestamp, int finishedMaps,int finishedReduces){
-      if( ! disableHistory ){
+    public static void logFailed(String jobid, long timestamp, int finishedMaps, int finishedReduces){
+      if (!disableHistory){
         synchronized(MASTER_INDEX_LOG_FILE){
           JobHistory.log(masterIndex, RecordTypes.Job,
                          new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
                          new String[] {jobid,  String.valueOf(timestamp), Values.FAILED.name(), String.valueOf(finishedMaps), 
-                                       String.valueOf(finishedReduces)} ) ; 
+                                       String.valueOf(finishedReduces)}); 
         }
-        String logFileName =  JOBTRACKER_START_TIME + "_" + jobid ; 
+        String logFileName =  JOBTRACKER_START_TIME + "_" + jobid; 
         PrintWriter writer = (PrintWriter)openJobs.get(logFileName); 
-        if( null != writer){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Job,
-                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS,Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
+                         new Enum[] {Keys.JOBID, Keys.FINISH_TIME, Keys.JOB_STATUS, Keys.FINISHED_MAPS, Keys.FINISHED_REDUCES },
                          new String[] {jobid,  String.valueOf(timestamp), Values.FAILED.name(), String.valueOf(finishedMaps), 
-                                       String.valueOf(finishedReduces)} ) ; 
+                                       String.valueOf(finishedReduces)}); 
           writer.close();
           openJobs.remove(logFileName); 
         }
@@ -438,11 +438,11 @@
      */
     public static void logStarted(String jobId, String taskId, String taskType, 
                                   long startTime){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
-        if( null != writer ){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE , Keys.START_TIME}, 
-                         new String[]{taskId, taskType, String.valueOf(startTime)}) ;
+                         new String[]{taskId, taskType, String.valueOf(startTime)});
         }
       }
     }
@@ -455,12 +455,12 @@
      */
     public static void logFinished(String jobId, String taskId, String taskType, 
                                    long finishTime){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
-        if( null != writer ){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE, 
                                                               Keys.TASK_STATUS, Keys.FINISH_TIME}, 
-                         new String[]{ taskId,taskType, Values.SUCCESS.name(), String.valueOf(finishTime)}) ;
+                         new String[]{ taskId, taskType, Values.SUCCESS.name(), String.valueOf(finishTime)});
         }
       }
     }
@@ -473,12 +473,12 @@
      * @param error error message for failure. 
      */
     public static void logFailed(String jobId, String taskId, String taskType, long time, String error){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId); 
-        if( null != writer ){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.Task, new Enum[]{Keys.TASKID, Keys.TASK_TYPE, 
                                                               Keys.TASK_STATUS, Keys.FINISH_TIME, Keys.ERROR}, 
-                         new String[]{ taskId,  taskType, Values.FAILED.name(), String.valueOf(time) , error}) ;
+                         new String[]{ taskId,  taskType, Values.FAILED.name(), String.valueOf(time) , error});
         }
       }
     }
@@ -508,15 +508,15 @@
      * @param startTime start time of task attempt as reported by task tracker. 
      * @param hostName host name of the task attempt. 
      */
-    public static void logStarted(String jobId, String taskId,String taskAttemptId, long startTime, String hostName){
-      if( ! disableHistory ){
+    public static void logStarted(String jobId, String taskId, String taskAttemptId, long startTime, String hostName){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
-          JobHistory.log( writer, RecordTypes.MapAttempt, 
-                          new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, 
-                                      Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
-                          new String[]{Values.MAP.name(),  taskId, 
-                                       taskAttemptId, String.valueOf(startTime), hostName} ) ; 
+        if (null != writer){
+          JobHistory.log(writer, RecordTypes.MapAttempt, 
+                         new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, 
+                                     Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
+                         new String[]{Values.MAP.name(),  taskId, 
+                                      taskAttemptId, String.valueOf(startTime), hostName}); 
         }
       }
     }
@@ -529,14 +529,14 @@
      * @param hostName host name 
      */
     public static void logFinished(String jobId, String taskId, String taskAttemptId, long finishTime, String hostName){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
+        if (null != writer){
           JobHistory.log(writer, RecordTypes.MapAttempt, 
                          new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
                                      Keys.FINISH_TIME, Keys.HOSTNAME},
                          new String[]{Values.MAP.name(), taskId, taskAttemptId, Values.SUCCESS.name(),  
-                                      String.valueOf(finishTime), hostName} ) ; 
+                                      String.valueOf(finishTime), hostName}); 
         }
       }
     }
@@ -551,14 +551,14 @@
      */
     public static void logFailed(String jobId, String taskId, String taskAttemptId, 
                                  long timestamp, String hostName, String error){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
-          JobHistory.log( writer, RecordTypes.MapAttempt, 
-                          new Enum[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
-                                     Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR},
-                          new String[]{ Values.MAP.name(), taskId, taskAttemptId, Values.FAILED.name(),
-                                        String.valueOf(timestamp), hostName, error} ) ; 
+        if (null != writer){
+          JobHistory.log(writer, RecordTypes.MapAttempt, 
+                         new Enum[]{Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                                    Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR},
+                         new String[]{ Values.MAP.name(), taskId, taskAttemptId, Values.FAILED.name(),
+                                       String.valueOf(timestamp), hostName, error}); 
         }
       }
     } 
@@ -578,14 +578,14 @@
      */
     public static void logStarted(String jobId, String taskId, String taskAttemptId, 
                                   long startTime, String hostName){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
-          JobHistory.log( writer, RecordTypes.ReduceAttempt, 
-                          new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, 
-                                       Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
-                          new String[]{Values.REDUCE.name(),  taskId, 
-                                       taskAttemptId, String.valueOf(startTime), hostName} ) ; 
+        if (null != writer){
+          JobHistory.log(writer, RecordTypes.ReduceAttempt, 
+                         new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, 
+                                      Keys.TASK_ATTEMPT_ID, Keys.START_TIME, Keys.HOSTNAME},
+                         new String[]{Values.REDUCE.name(),  taskId, 
+                                      taskAttemptId, String.valueOf(startTime), hostName}); 
         }
       }
     }
@@ -601,15 +601,15 @@
      */
     public static void logFinished(String jobId, String taskId, String taskAttemptId, 
                                    long shuffleFinished, long sortFinished, long finishTime, String hostName){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
-          JobHistory.log( writer, RecordTypes.ReduceAttempt, 
-                          new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
-                                      Keys.SHUFFLE_FINISHED, Keys.SORT_FINISHED, Keys.FINISH_TIME, Keys.HOSTNAME},
-                          new String[]{Values.REDUCE.name(),  taskId, taskAttemptId, Values.SUCCESS.name(), 
-                                       String.valueOf(shuffleFinished), String.valueOf(sortFinished),
-                                       String.valueOf(finishTime), hostName} ) ; 
+        if (null != writer){
+          JobHistory.log(writer, RecordTypes.ReduceAttempt, 
+                         new Enum[]{ Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                                     Keys.SHUFFLE_FINISHED, Keys.SORT_FINISHED, Keys.FINISH_TIME, Keys.HOSTNAME},
+                         new String[]{Values.REDUCE.name(),  taskId, taskAttemptId, Values.SUCCESS.name(), 
+                                      String.valueOf(shuffleFinished), String.valueOf(sortFinished),
+                                      String.valueOf(finishTime), hostName}); 
         }
       }
     }
@@ -622,16 +622,16 @@
      * @param hostName host name of the task attempt.  
      * @param error error message of the task. 
      */
-    public static void logFailed(String jobId, String taskId,String taskAttemptId, long timestamp, 
+    public static void logFailed(String jobId, String taskId, String taskAttemptId, long timestamp, 
                                  String hostName, String error){
-      if( ! disableHistory ){
+      if (!disableHistory){
         PrintWriter writer = (PrintWriter)openJobs.get(JOBTRACKER_START_TIME + "_" + jobId);
-        if( null != writer ){
-          JobHistory.log( writer, RecordTypes.ReduceAttempt, 
-                          new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID,Keys.TASK_STATUS, 
-                                       Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR },
-                          new String[]{ Values.REDUCE.name(), taskId, taskAttemptId, Values.FAILED.name(), 
-                                        String.valueOf(timestamp), hostName, error } ) ; 
+        if (null != writer){
+          JobHistory.log(writer, RecordTypes.ReduceAttempt, 
+                         new Enum[]{  Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS, 
+                                      Keys.FINISH_TIME, Keys.HOSTNAME, Keys.ERROR },
+                         new String[]{ Values.REDUCE.name(), taskId, taskAttemptId, Values.FAILED.name(), 
+                                       String.valueOf(timestamp), hostName, error }); 
         }
       }
     }
@@ -661,24 +661,24 @@
   public static class HistoryCleaner implements Runnable{
     static final long ONE_DAY_IN_MS = 24 * 60 * 60 * 1000L;
     static final long THIRTY_DAYS_IN_MS = 30 * ONE_DAY_IN_MS;
-    private long now ; 
+    private long now; 
     private static boolean isRunning = false; 
-    private static long lastRan ; 
+    private static long lastRan; 
 
     /**
      * Cleans up history data. 
      */
     public void run(){
-      if( isRunning ){
-        return ; 
+      if (isRunning){
+        return; 
       }
-      now = System.currentTimeMillis() ;
+      now = System.currentTimeMillis();
       // clean history only once a day at max
-      if( lastRan ==0 || (now - lastRan) < ONE_DAY_IN_MS ){
-        return ; 
+      if (lastRan ==0 || (now - lastRan) < ONE_DAY_IN_MS){
+        return; 
       }
       lastRan = now;  
-      isRunning = true ; 
+      isRunning = true; 
       // update master Index first
       try{
         File logFile = new File(
@@ -693,12 +693,12 @@
           // remove the jobtracker start timestamp as well. 
           for (String jobTrackerId : jobTrackersToJobs.keySet()){
             Map<String, JobHistory.JobInfo> jobs = jobTrackersToJobs.get(jobTrackerId);
-            for(Iterator iter = jobs.keySet().iterator(); iter.hasNext() ; iter.next()){
+            for(Iterator iter = jobs.keySet().iterator(); iter.hasNext(); iter.next()){
               JobHistory.JobInfo job = jobs.get(iter.next());
-              if( now - job.getLong(Keys.SUBMIT_TIME) > THIRTY_DAYS_IN_MS ) {
+              if (now - job.getLong(Keys.SUBMIT_TIME) > THIRTY_DAYS_IN_MS) {
                 iter.remove(); 
               }
-              if( jobs.size() == 0 ){
+              if (jobs.size() == 0){
                 iter.remove(); 
               }
             }
@@ -711,7 +711,7 @@
             
             log(masterIndex, RecordTypes.Jobtracker, Keys.START_TIME, jobTrackerId);
 
-            for(String jobId : jobs.keySet() ){
+            for(String jobId : jobs.keySet()){
               JobHistory.JobInfo job = jobs.get(jobId);
               Map<Keys, String> values = job.getValues();
               
@@ -730,17 +730,17 @@
       File[] oldFiles = new File(LOG_DIR).listFiles(new FileFilter(){
           public boolean accept(File file){
             // delete if older than 30 days
-            if( now - file.lastModified() > THIRTY_DAYS_IN_MS ){
-              return true ; 
+            if (now - file.lastModified() > THIRTY_DAYS_IN_MS){
+              return true; 
             }
             return false; 
           }
         });
-      for( File f : oldFiles){
+      for(File f : oldFiles){
         f.delete(); 
         LOG.info("Deleting old history file : " + f.getName());
       }
-      isRunning = false ; 
+      isRunning = false; 
     }
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Thu Apr 19 14:34:41 2007
@@ -61,13 +61,13 @@
   int runningReduceTasks = 0;
   int finishedMapTasks = 0;
   int finishedReduceTasks = 0;
-  int failedMapTasks = 0 ; 
-  int failedReduceTasks = 0 ; 
+  int failedMapTasks = 0; 
+  int failedReduceTasks = 0; 
   JobTracker jobtracker = null;
   Map<String,List<TaskInProgress>> hostToMaps =
     new HashMap<String,List<TaskInProgress>>();
-  private int taskCompletionEventTracker = 0 ; 
-  List<TaskCompletionEvent> taskCompletionEvents ;
+  private int taskCompletionEventTracker = 0; 
+  List<TaskCompletionEvent> taskCompletionEvents;
     
   // The no. of tasktrackers where >= conf.getMaxTaskFailuresPerTracker()
   // tasks have failed
@@ -122,7 +122,7 @@
     this.numMapTasks = conf.getNumMapTasks();
     this.numReduceTasks = conf.getNumReduceTasks();
     this.taskCompletionEvents = new ArrayList<TaskCompletionEvent>(
-                                              numMapTasks + numReduceTasks + 10);
+                                                                   numMapTasks + numReduceTasks + 10);
         
     JobHistory.JobInfo.logSubmitted(jobid, conf.getJobName(), conf.getUser(), 
                                     System.currentTimeMillis(), jobFile); 
@@ -284,7 +284,7 @@
    * Return a vector of completed TaskInProgress objects
    */
   public Vector<TaskInProgress> reportTasksInProgress(boolean shouldBeMap,
-      boolean shouldBeComplete) {
+                                                      boolean shouldBeComplete) {
     
     Vector<TaskInProgress> results = new Vector<TaskInProgress>();
     TaskInProgress tips[] = null;
@@ -317,7 +317,7 @@
       TaskTrackerStatus ttStatus = 
         this.jobtracker.getTaskTracker(status.getTaskTracker());
       String httpTaskLogLocation = null; 
-      if( null != ttStatus ){
+      if (null != ttStatus){
         httpTaskLogLocation = "http://" + ttStatus.getHost() + ":" + 
           ttStatus.getHttpPort() + "/tasklog.jsp?plaintext=true&taskid=" +
           status.getTaskId() + "&all=true";
@@ -432,7 +432,7 @@
    */
   public Task obtainNewMapTask(TaskTrackerStatus tts, int clusterSize
                                ) throws IOException {
-    if (! tasksInited) {
+    if (!tasksInited) {
       LOG.info("Cannot create task split for " + profile.getJobId());
       return null;
     }
@@ -460,7 +460,7 @@
    */
   public Task obtainNewReduceTask(TaskTrackerStatus tts,
                                   int clusterSize) throws IOException {
-    if (! tasksInited) {
+    if (!tasksInited) {
       LOG.info("Cannot create task split for " + profile.getJobId());
       return null;
     }
@@ -620,7 +620,7 @@
             return i;
           } else if (specTarget == -1 &&
                      task.hasSpeculativeTask(avgProgress) && 
-                     ! task.hasRunOnMachine(taskTracker)) {
+                     !task.hasRunOnMachine(taskTracker)) {
             specTarget = i;
           }
         }
@@ -664,7 +664,7 @@
 
     // Update jobhistory 
     String taskTrackerName = status.getTaskTracker();
-    if(status.getIsMap()){
+    if (status.getIsMap()){
       JobHistory.MapAttempt.logStarted(profile.getJobId(), 
                                        tip.getTIPId(), status.getTaskId(), status.getStartTime(), 
                                        taskTrackerName); 
@@ -704,7 +704,7 @@
     //
     boolean allDone = true;
     for (int i = 0; i < maps.length; i++) {
-      if (! maps[i].isComplete()) {
+      if (!maps[i].isComplete()) {
         allDone = false;
         break;
       }
@@ -714,7 +714,7 @@
         this.status.setMapProgress(1.0f);              
       }
       for (int i = 0; i < reduces.length; i++) {
-        if (! reduces[i].isComplete()) {
+        if (!reduces[i].isComplete()) {
           allDone = false;
           break;
         }
@@ -953,7 +953,7 @@
   synchronized public TaskCompletionEvent[] getTaskCompletionEvents(
                                                                     int fromEventId, int maxEvents) {
     TaskCompletionEvent[] events = TaskCompletionEvent.EMPTY_ARRAY;
-    if( taskCompletionEvents.size() > fromEventId) {
+    if (taskCompletionEvents.size() > fromEventId) {
       int actualMax = Math.min(maxEvents, 
                                (taskCompletionEvents.size() - fromEventId));
       events = (TaskCompletionEvent[])taskCompletionEvents.subList(

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Thu Apr 19 14:34:41 2007
@@ -565,18 +565,18 @@
    */
   TreeSet<TaskTrackerStatus> trackerExpiryQueue =
     new TreeSet<TaskTrackerStatus>(
-      new Comparator<TaskTrackerStatus>() {
-        public int compare(TaskTrackerStatus p1, TaskTrackerStatus p2) {
-          if (p1.getLastSeen() < p2.getLastSeen()) {
-            return -1;
-          } else if (p1.getLastSeen() > p2.getLastSeen()) {
-            return 1;
-          } else {
-            return (p1.getTrackerName().compareTo(p2.getTrackerName()));
-          }
-        }
-      }
-    );
+                                   new Comparator<TaskTrackerStatus>() {
+                                     public int compare(TaskTrackerStatus p1, TaskTrackerStatus p2) {
+                                       if (p1.getLastSeen() < p2.getLastSeen()) {
+                                         return -1;
+                                       } else if (p1.getLastSeen() > p2.getLastSeen()) {
+                                         return 1;
+                                       } else {
+                                         return (p1.getTrackerName().compareTo(p2.getTrackerName()));
+                                       }
+                                     }
+                                   }
+                                   );
 
   // Used to provide an HTML view on Job, Task, and TaskTracker structures
   StatusHttpServer infoServer;
@@ -628,10 +628,10 @@
     InetSocketAddress addr = getAddress(conf);
     this.localMachine = addr.getHostName();
     this.port = addr.getPort();
-    this.interTrackerServer = RPC.getServer(this,addr.getHostName(), addr.getPort(), 10, false, conf);
+    this.interTrackerServer = RPC.getServer(this, addr.getHostName(), addr.getPort(), 10, false, conf);
     this.interTrackerServer.start();
     Properties p = System.getProperties();
-    for (Iterator it = p.keySet().iterator(); it.hasNext(); ) {
+    for (Iterator it = p.keySet().iterator(); it.hasNext();) {
       String key = (String) it.next();
       String val = (String) p.getProperty(key);
       LOG.info("Property '" + key + "' is " + val);
@@ -886,7 +886,7 @@
     // Mark the 'non-running' tasks for pruning
     markCompletedJob(job);
 
-      JobEndNotifier.registerNotification(job.getJobConf(), job.getStatus());
+    JobEndNotifier.registerNotification(job.getJobConf(), job.getStatus());
 
     // Purge oldest jobs and keep at-most MAX_COMPLETE_USER_JOBS_IN_MEMORY jobs of a given user
     // in memory; information about the purged jobs is available via
@@ -964,7 +964,7 @@
   }
   public Vector<JobInProgress> runningJobs() {
     Vector<JobInProgress> v = new Vector<JobInProgress>();
-    for (Iterator it = jobs.values().iterator(); it.hasNext(); ) {
+    for (Iterator it = jobs.values().iterator(); it.hasNext();) {
       JobInProgress jip = (JobInProgress) it.next();
       JobStatus status = jip.getStatus();
       if (status.getRunState() == JobStatus.RUNNING) {
@@ -984,7 +984,7 @@
   }
   public Vector<JobInProgress> failedJobs() {
     Vector<JobInProgress> v = new Vector<JobInProgress>();
-    for (Iterator it = jobs.values().iterator(); it.hasNext(); ) {
+    for (Iterator it = jobs.values().iterator(); it.hasNext();) {
       JobInProgress jip = (JobInProgress) it.next();
       JobStatus status = jip.getStatus();
       if (status.getRunState() == JobStatus.FAILED) {
@@ -995,7 +995,7 @@
   }
   public Vector<JobInProgress> completedJobs() {
     Vector<JobInProgress> v = new Vector<JobInProgress>();
-    for (Iterator it = jobs.values().iterator(); it.hasNext(); ) {
+    for (Iterator it = jobs.values().iterator(); it.hasNext();) {
       JobInProgress jip = (JobInProgress) it.next();
       JobStatus status = jip.getStatus();
       if (status.getRunState() == JobStatus.SUCCEEDED) {
@@ -1230,7 +1230,7 @@
     int totalCapacity = numTaskTrackers * maxCurrentTasks;
 
     synchronized(jobsByArrival){
-      for (Iterator it = jobsByArrival.iterator(); it.hasNext(); ) {
+      for (Iterator it = jobsByArrival.iterator(); it.hasNext();) {
         JobInProgress job = (JobInProgress) it.next();
         if (job.getStatus().getRunState() == JobStatus.RUNNING) {
           int totalMapTasks = job.desiredMaps();
@@ -1278,7 +1278,7 @@
       if (numMaps < maxMapLoad) {
 
         int totalNeededMaps = 0;
-        for (Iterator it = jobsByArrival.iterator(); it.hasNext(); ) {
+        for (Iterator it = jobsByArrival.iterator(); it.hasNext();) {
           JobInProgress job = (JobInProgress) it.next();
           if (job.getStatus().getRunState() != JobStatus.RUNNING) {
             continue;
@@ -1314,7 +1314,7 @@
       if (numReduces < maxReduceLoad) {
 
         int totalNeededReduces = 0;
-        for (Iterator it = jobsByArrival.iterator(); it.hasNext(); ) {
+        for (Iterator it = jobsByArrival.iterator(); it.hasNext();) {
           JobInProgress job = (JobInProgress) it.next();
           if (job.getStatus().getRunState() != JobStatus.RUNNING ||
               job.numReduceTasks == 0) {
@@ -1354,13 +1354,13 @@
    * closed (because the job completed, whether successfully or not)
    */
   private synchronized List<TaskTrackerAction> getTasksToKill(
-      String taskTracker) {
+                                                              String taskTracker) {
     
     Set<String> taskIds = trackerToTaskMap.get(taskTracker);
     if (taskIds != null) {
       List<TaskTrackerAction> killList = new ArrayList<TaskTrackerAction>();
       Set<String> killJobIds = new TreeSet<String>(); 
-      for (String killTaskId : taskIds ) {
+      for (String killTaskId : taskIds) {
         TaskInProgress tip = taskidToTIPMap.get(killTaskId);
         if (tip.shouldCloseForClosedJob(killTaskId)) {
           // 
@@ -1492,13 +1492,13 @@
       Vector<TaskReport> reports = new Vector<TaskReport>();
       Vector<TaskInProgress> completeMapTasks =
         job.reportTasksInProgress(true, true);
-      for (Iterator it = completeMapTasks.iterator(); it.hasNext(); ) {
+      for (Iterator it = completeMapTasks.iterator(); it.hasNext();) {
         TaskInProgress tip = (TaskInProgress) it.next();
         reports.add(tip.generateSingleReport());
       }
       Vector<TaskInProgress> incompleteMapTasks =
         job.reportTasksInProgress(true, false);
-      for (Iterator it = incompleteMapTasks.iterator(); it.hasNext(); ) {
+      for (Iterator it = incompleteMapTasks.iterator(); it.hasNext();) {
         TaskInProgress tip = (TaskInProgress) it.next();
         reports.add(tip.generateSingleReport());
       }
@@ -1513,12 +1513,12 @@
     } else {
       Vector<TaskReport> reports = new Vector<TaskReport>();
       Vector completeReduceTasks = job.reportTasksInProgress(false, true);
-      for (Iterator it = completeReduceTasks.iterator(); it.hasNext(); ) {
+      for (Iterator it = completeReduceTasks.iterator(); it.hasNext();) {
         TaskInProgress tip = (TaskInProgress) it.next();
         reports.add(tip.generateSingleReport());
       }
       Vector incompleteReduceTasks = job.reportTasksInProgress(false, false);
-      for (Iterator it = incompleteReduceTasks.iterator(); it.hasNext(); ) {
+      for (Iterator it = incompleteReduceTasks.iterator(); it.hasNext();) {
         TaskInProgress tip = (TaskInProgress) it.next();
         reports.add(tip.generateSingleReport());
       }
@@ -1604,7 +1604,7 @@
     
   public JobStatus[] jobsToComplete() {
     Vector<JobStatus> v = new Vector<JobStatus>();
-    for (Iterator it = jobs.values().iterator(); it.hasNext(); ) {
+    for (Iterator it = jobs.values().iterator(); it.hasNext();) {
       JobInProgress jip = (JobInProgress) it.next();
       JobStatus status = jip.getStatus();
       if (status.getRunState() == JobStatus.RUNNING 
@@ -1665,7 +1665,7 @@
     trackerToTaskMap.remove(trackerName);
 
     if (lostTasks != null) {
-      for (Iterator it = lostTasks.iterator(); it.hasNext(); ) {
+      for (Iterator it = lostTasks.iterator(); it.hasNext();) {
         String taskId = (String) it.next();
         TaskInProgress tip = taskidToTIPMap.get(taskId);
 
@@ -1709,8 +1709,8 @@
     try {
       Configuration conf=new Configuration();
       startTracker(conf);
-    } catch ( Throwable e ) {
-      LOG.fatal( StringUtils.stringifyException( e ) );
+    } catch (Throwable e) {
+      LOG.fatal(StringUtils.stringifyException(e));
       System.exit(-1);
     }
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java Thu Apr 19 14:34:41 2007
@@ -29,7 +29,7 @@
 public class KeyValueTextInputFormat extends TextInputFormat {
 
   public RecordReader getRecordReader(InputSplit genericSplit, JobConf job,
-      Reporter reporter) throws IOException {
+                                      Reporter reporter) throws IOException {
     reporter.setStatus(genericSplit.toString());
     return new KeyValueLineRecordReader(job, (FileSplit) genericSplit);
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java Thu Apr 19 14:34:41 2007
@@ -108,7 +108,7 @@
         job.setNumReduceTasks(1);                 // force a single reduce task
         DataOutputBuffer buffer = new DataOutputBuffer();
         for (int i = 0; i < splits.length; i++) {
-          String mapId = "map_" + newId() ; 
+          String mapId = "map_" + newId(); 
           mapIds.add(mapId);
           buffer.reset();
           splits[i].write(buffer);
@@ -180,7 +180,7 @@
     }
     
     private String newId() {
-      return Integer.toString(Math.abs(random.nextInt()),36);
+      return Integer.toString(Math.abs(random.nextInt()), 36);
     }
 
     // TaskUmbilicalProtocol methods

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Thu Apr 19 14:34:41 2007
@@ -36,7 +36,7 @@
 
   public RecordWriter getRecordWriter(FileSystem ignored, JobConf job,
                                       String name, Progressable progress)
-                                      throws IOException {
+    throws IOException {
 
     Path file = new Path(job.getOutputPath(), name);
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java Thu Apr 19 14:34:41 2007
@@ -34,13 +34,13 @@
  * {@link InterTrackerProtocol}. */ 
 class MapOutputLocation implements Writable, MRConstants {
 
-    static {                                      // register a ctor
-      WritableFactories.setFactory
-        (MapOutputLocation.class,
-         new WritableFactory() {
-           public Writable newInstance() { return new MapOutputLocation(); }
-         });
-    }
+  static {                                      // register a ctor
+    WritableFactories.setFactory
+      (MapOutputLocation.class,
+       new WritableFactory() {
+         public Writable newInstance() { return new MapOutputLocation(); }
+       });
+  }
 
   private String mapTaskId;
   private int mapId;
@@ -93,7 +93,7 @@
 
   public String toString() {
     return "http://" + host + ":" + port + "/mapOutput?map=" + 
-            mapTaskId;
+      mapTaskId;
   }
   
   /**
@@ -134,7 +134,7 @@
           int len = input.read(buffer);
           while (len > 0) {
             totalBytes += len;
-            output.write(buffer, 0 ,len);
+            output.write(buffer, 0 , len);
             if (pingee != null) {
               pingee.progress();
             }
@@ -207,13 +207,13 @@
       int length = connection.getContentLength();
       int inMemFSSize = inMemFileSys.getFSSize();
       int checksumLength = (int)inMemFileSys.getChecksumFileLength(
-              localFilename, length);
+                                                                   localFilename, length);
         
       boolean createInMem = false; 
       if (inMemFSSize > 0)  
         createInMem = (((float)(length + checksumLength) / inMemFSSize <= 
-        MAX_INMEM_FILESIZE_FRACTION) && 
-        inMemFileSys.reserveSpaceWithCheckSum(localFilename, length));
+                        MAX_INMEM_FILESIZE_FRACTION) && 
+                       inMemFileSys.reserveSpaceWithCheckSum(localFilename, length));
       
       if (createInMem)
         fileSys = inMemFileSys;
@@ -232,7 +232,7 @@
             totalBytes += len;
             shuffleMetrics.incrMetric("shuffle_input_bytes", len);
             shuffleMetrics.update();
-            output.write(buffer, 0 ,len);
+            output.write(buffer, 0 , len);
             if (currentThread.isInterrupted()) {
               throw new InterruptedException();
             }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java Thu Apr 19 14:34:41 2007
@@ -48,7 +48,7 @@
         mapper.map(key, value, output, reporter);
       }
     } finally {
-        mapper.close();
+      mapper.close();
     }
   }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java Thu Apr 19 14:34:41 2007
@@ -35,7 +35,7 @@
    * @return a {@link RecordWriter}
    */
   RecordWriter getRecordWriter(FileSystem ignored, JobConf job, String name,
-          Progressable progress)
+                               Progressable progress)
     throws IOException;
 
   /** Check whether the output specification for a job is appropriate.  Called

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java Thu Apr 19 14:34:41 2007
@@ -85,8 +85,8 @@
     throws IOException;
 
   public void checkOutputSpecs(FileSystem ignored, JobConf job) 
-          throws FileAlreadyExistsException, 
-             InvalidJobConfException, IOException {
+    throws FileAlreadyExistsException, 
+           InvalidJobConfException, IOException {
     // Ensure that the output directory is set and not already there
     Path outDir = job.getOutputPath();
     if (outDir == null && job.getNumReduceTasks() != 0) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/PhasedFileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/PhasedFileSystem.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/PhasedFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/PhasedFileSystem.java Thu Apr 19 14:34:41 2007
@@ -30,11 +30,11 @@
   // Map from final file name to temporary file name
   private Map<Path, FileInfo> finalNameToFileInfo = new HashMap<Path, FileInfo>(); 
   
-  private String jobid ; 
-  private String tipid ; 
-  private String taskid ; 
+  private String jobid; 
+  private String tipid; 
+  private String taskid; 
   
-  private Path tempDir ; 
+  private Path tempDir; 
   /**
    * This Constructor is used to wrap a FileSystem object to a 
    * Phased FilsSystem.  
@@ -47,10 +47,10 @@
                           String tipid, String taskid) {
     super(fs); 
     this.jobid = jobid; 
-    this.tipid = tipid ; 
-    this.taskid = taskid ; 
+    this.tipid = tipid; 
+    this.taskid = taskid; 
     
-    tempDir = new Path(fs.getConf().get("mapred.system.dir") ); 
+    tempDir = new Path(fs.getConf().get("mapred.system.dir")); 
     this.setConf(fs.getConf());
   }
   /**
@@ -63,15 +63,15 @@
     super(fs); 
     this.jobid = conf.get("mapred.job.id"); 
     this.tipid = conf.get("mapred.tip.id"); 
-    this.taskid = conf.get("mapred.task.id") ; 
+    this.taskid = conf.get("mapred.task.id"); 
     
-    tempDir = new Path(fs.getConf().get("mapred.system.dir") );
+    tempDir = new Path(fs.getConf().get("mapred.system.dir"));
     this.setConf(fs.getConf());
   }
   
   private Path setupFile(Path finalFile, boolean overwrite) throws IOException{
-    if( finalNameToFileInfo.containsKey(finalFile) ){
-      if( !overwrite ){
+    if (finalNameToFileInfo.containsKey(finalFile)){
+      if (!overwrite){
         throw new IOException("Error, file already exists : " + 
                               finalFile.toString()); 
       }else{
@@ -82,8 +82,8 @@
         }catch(IOException ioe){
           // ignore if already closed
         }
-        if( fs.exists(fInfo.getTempPath())){
-          fs.delete( fInfo.getTempPath() );
+        if (fs.exists(fInfo.getTempPath())){
+          fs.delete(fInfo.getTempPath());
         }
         finalNameToFileInfo.remove(finalFile); 
       }
@@ -96,20 +96,20 @@
     
     finalNameToFileInfo.put(finalFile, fInfo);
     
-    return tempPath ; 
+    return tempPath; 
   }
   
   public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
-                                   short replication, long blockSize,Progressable progress)
+                                   short replication, long blockSize, Progressable progress)
     throws IOException {
-    if( fs.exists(f) && !overwrite ){
+    if (fs.exists(f) && !overwrite){
       throw new IOException("Error creating file - already exists : " + f); 
     }
     FSDataOutputStream stream = 
       fs.create(setupFile(f, overwrite), overwrite, bufferSize, replication, 
                 blockSize, progress);
     finalNameToFileInfo.get(f).setOpenFileStream(stream); 
-    return stream ; 
+    return stream; 
   }
   
   /**
@@ -125,8 +125,8 @@
   // use extra method arg to avoid concurrentModificationException 
   // if committing using this method while iterating.  
   private void commit(Path fPath , boolean removeFromMap)throws IOException{
-    FileInfo fInfo = finalNameToFileInfo.get(fPath) ; 
-    if( null == fInfo ){
+    FileInfo fInfo = finalNameToFileInfo.get(fPath); 
+    if (null == fInfo){
       throw new IOException("Error committing file! File was not created " + 
                             "with PhasedFileSystem : " + fPath); 
     }
@@ -138,18 +138,18 @@
     }
     Path tempPath = fInfo.getTempPath(); 
     // ignore .crc files 
-    if(! tempPath.toString().endsWith(".crc")){
-      if( !fs.exists(fPath) || fInfo.isOverwrite()){
-        if(!fs.exists(fPath.getParent())){
+    if (!tempPath.toString().endsWith(".crc")){
+      if (!fs.exists(fPath) || fInfo.isOverwrite()){
+        if (!fs.exists(fPath.getParent())){
           fs.mkdirs(fPath.getParent());
         }
         
-        if( fs.exists(fPath) && fInfo.isOverwrite()){
+        if (fs.exists(fPath) && fInfo.isOverwrite()){
           fs.delete(fPath); 
         }
         
         try {
-          if( !fs.rename(fInfo.getTempPath(), fPath) ){
+          if (!fs.rename(fInfo.getTempPath(), fPath)){
             // delete the temp file if rename failed
             fs.delete(fInfo.getTempPath());
           }
@@ -164,7 +164,7 @@
         fs.delete(fInfo.getTempPath());
       }
       // done with the file
-      if( removeFromMap ){
+      if (removeFromMap){
         finalNameToFileInfo.remove(fPath);
       }
     }
@@ -178,7 +178,7 @@
    * @throws IOException if any file fails to commit
    */
   public void commit() throws IOException {
-    for( Path fPath : finalNameToFileInfo.keySet()){
+    for(Path fPath : finalNameToFileInfo.keySet()){
       commit(fPath, false);  
     }
     // safe to clear map now
@@ -197,14 +197,14 @@
   // if aborting using this method while iterating.  
   private void abort(Path p, boolean removeFromMap) throws IOException{
     FileInfo fInfo = finalNameToFileInfo.get(p); 
-    if( null != fInfo ){
+    if (null != fInfo){
       try{
         fInfo.getOpenFileStream().close();
       }catch(IOException ioe){
         // ignore if already closed
       }
       fs.delete(fInfo.getTempPath()); 
-      if( removeFromMap ){
+      if (removeFromMap){
         finalNameToFileInfo.remove(p);
       }
     }
@@ -217,7 +217,7 @@
    * @throws IOException
    */
   public void abort() throws IOException {
-    for(Path fPath : finalNameToFileInfo.keySet() ){
+    for(Path fPath : finalNameToFileInfo.keySet()){
       abort(fPath, false); 
     }
     // safe to clean now
@@ -225,8 +225,8 @@
   }
   
   @Override
-    public boolean setReplication(
-                                  Path src, short replication)
+  public boolean setReplication(
+                                Path src, short replication)
     throws IOException {
     // throw IOException for interface compatibility with 
     // base class. 
@@ -234,15 +234,15 @@
   }
 
   @Override
-    public boolean rename(
-                          Path src, Path dst)
+  public boolean rename(
+                        Path src, Path dst)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public boolean delete(
-                          Path f)
+  public boolean delete(
+                        Path f)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
@@ -264,54 +264,54 @@
   }
 
   @Override
-    public void copyFromLocalFile(
-                                  boolean delSrc, Path src, Path dst)
+  public void copyFromLocalFile(
+                                boolean delSrc, Path src, Path dst)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public void copyToLocalFile(
-                                boolean delSrc, Path src, Path dst)
+  public void copyToLocalFile(
+                              boolean delSrc, Path src, Path dst)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public Path startLocalOutput(
-                                 Path fsOutputFile, Path tmpLocalFile)
+  public Path startLocalOutput(
+                               Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public void completeLocalOutput(
-                                    Path fsOutputFile, Path tmpLocalFile)
+  public void completeLocalOutput(
+                                  Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public String[][] getFileCacheHints(
-                                        Path f, long start, long len)
+  public String[][] getFileCacheHints(
+                                      Path f, long start, long len)
     throws IOException {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   @Override
-    public String getName() {
+  public String getName() {
     throw new UnsupportedOperationException("Operation not supported");  
   }
 
   private class FileInfo {
-    private Path tempPath ;
-    private Path finalPath ; 
-    private OutputStream openFileStream ; 
-    private boolean overwrite ;
+    private Path tempPath;
+    private Path finalPath; 
+    private OutputStream openFileStream; 
+    private boolean overwrite;
     
     FileInfo(Path tempPath, Path finalPath, boolean overwrite){
-      this.tempPath = tempPath ; 
-      this.finalPath = finalPath ; 
+      this.tempPath = tempPath; 
+      this.finalPath = finalPath; 
       this.overwrite = overwrite; 
     }
     public OutputStream getOpenFileStream() {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Thu Apr 19 14:34:41 2007
@@ -302,7 +302,7 @@
     SequenceFile.Sorter.RawKeyValueIterator rIter;
  
     try {
-      setPhase(TaskStatus.Phase.SORT) ; 
+      setPhase(TaskStatus.Phase.SORT); 
       sortProgress.start();
 
       // sort the input file
@@ -323,15 +323,15 @@
     // make output collector
     String finalName = getOutputName(getPartition());
     boolean runSpeculative = job.getSpeculativeExecution();
-    FileSystem fs = FileSystem.get(job) ;
+    FileSystem fs = FileSystem.get(job);
 
-    if( runSpeculative ){
+    if (runSpeculative){
       fs = new PhasedFileSystem(fs, getJobId(), getTipId(), getTaskId());
     }
     
     final RecordWriter out = 
-      job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter) ;  
+      job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter);  
     
     OutputCollector collector = new OutputCollector() {
         public void collect(WritableComparable key, Writable value)
@@ -361,17 +361,17 @@
       out.close(reporter);
       //End of clean up.
       
-      if( runSpeculative ){
+      if (runSpeculative){
         ((PhasedFileSystem)fs).commit(); 
       }
-    } catch ( IOException ioe ) {
+    } catch (IOException ioe) {
       try {
         reducer.close();
-      } catch ( IOException ignored ) {}
+      } catch (IOException ignored) {}
         
       try {
         out.close(reporter);
-      } catch ( IOException ignored ) {}
+      } catch (IOException ignored) {}
       
       throw ioe;
     }
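
For context, the hunk above wraps the task's FileSystem in a PhasedFileSystem
when speculative execution is enabled, so output stays in a per-task temporary
area until commit() promotes it. A minimal lifecycle sketch, assuming
PhasedFileSystem overrides FileSystem.create(Path) the same way it overrides
setReplication/rename in the hunks above; the helper class and path name are
illustrative only:

    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.PhasedFileSystem;

    public class PhasedOutputSketch {
      static void writeOutput(JobConf job, String jobId, String tipId,
                              String taskId) throws IOException {
        FileSystem raw = FileSystem.get(job);
        // writes go to a temp path owned by this task until commit()
        PhasedFileSystem phased = new PhasedFileSystem(raw, jobId, tipId, taskId);
        OutputStream out = phased.create(new Path("part-00000"));
        try {
          out.write("speculative output\n".getBytes());
          out.close();
          phased.commit();   // rename temp files to their final names
        } catch (IOException ioe) {
          phased.abort();    // close streams and delete the temp files
          throw ioe;
        }
      }
    }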

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java Thu Apr 19 14:34:41 2007
@@ -74,7 +74,7 @@
    * @param filterClass filter class
    */
   public static void setFilterClass(Configuration conf, Class filterClass) {
-    conf.set(FILTER_CLASS, filterClass.getName() );
+    conf.set(FILTER_CLASS, filterClass.getName());
   }
 
          
@@ -109,7 +109,7 @@
      * @param conf where the regex is set
      * @param regex regex used as a filter
      */
-    public static void setPattern(Configuration conf, String regex )
+    public static void setPattern(Configuration conf, String regex)
       throws PatternSyntaxException {
       try {
         Pattern.compile(regex);
@@ -125,7 +125,7 @@
      */
     public void setConf(Configuration conf) {
       String regex = conf.get(FILTER_REGEX);
-      if(regex==null)
+      if (regex==null)
         throw new RuntimeException(FILTER_REGEX + " not set");
       this.p = Pattern.compile(regex);
       this.conf = conf;
@@ -154,8 +154,8 @@
      * @param conf configuration
     * @param frequency filtering frequency
      */
-    public static void setFrequency(Configuration conf, int frequency ){
-      if(frequency<=0)
+    public static void setFrequency(Configuration conf, int frequency){
+      if (frequency<=0)
         throw new IllegalArgumentException(
                                            "Negative " + FILTER_FREQUENCY + ": "+frequency);
       conf.setInt(FILTER_FREQUENCY, frequency);
@@ -169,7 +169,7 @@
      */
     public void setConf(Configuration conf) {
       this.frequency = conf.getInt("sequencefile.filter.frequency", 10);
-      if(this.frequency <=0 ) {
+      if (this.frequency <=0) {
         throw new RuntimeException(
                                    "Negative "+FILTER_FREQUENCY+": "+this.frequency);
       }
@@ -182,9 +182,9 @@
      */
     public boolean accept(Writable key) {
       boolean accepted = false;
-      if(count == 0)
+      if (count == 0)
         accepted = true;
-      if( ++count == frequency ) {
+      if (++count == frequency) {
         count = 0;
       }
       return accepted;
@@ -215,8 +215,8 @@
      * @param conf configuration
      * @param frequency filtering frequency
      */
-    public static void setFrequency(Configuration conf, int frequency ){
-      if(frequency<=0)
+    public static void setFrequency(Configuration conf, int frequency){
+      if (frequency<=0)
         throw new IllegalArgumentException(
                                            "Negative " + FILTER_FREQUENCY + ": "+frequency);
       conf.setInt(FILTER_FREQUENCY, frequency);
@@ -230,7 +230,7 @@
      */
     public void setConf(Configuration conf) {
       this.frequency = conf.getInt(FILTER_FREQUENCY, 10);
-      if(this.frequency <=0 ) {
+      if (this.frequency <=0) {
         throw new RuntimeException(
                                    "Negative "+FILTER_FREQUENCY+": "+this.frequency);
       }
@@ -244,16 +244,16 @@
     public boolean accept(Writable key) {
       try {
         long hashcode;
-        if( key instanceof Text) {
+        if (key instanceof Text) {
           hashcode = MD5Hashcode((Text)key);
-        } else if( key instanceof BytesWritable) {
+        } else if (key instanceof BytesWritable) {
           hashcode = MD5Hashcode((BytesWritable)key);
         } else {
           ByteBuffer bb;
           bb = Text.encode(key.toString());
-          hashcode = MD5Hashcode(bb.array(),0, bb.limit());
+          hashcode = MD5Hashcode(bb.array(), 0, bb.limit());
         }
-        if(hashcode/frequency*frequency==hashcode)
+        if (hashcode/frequency*frequency==hashcode)
           return true;
       } catch(Exception e) {
         LOG.warn(e);
@@ -270,7 +270,7 @@
       return MD5Hashcode(key.get(), 0, key.getSize());
     }
     synchronized private long MD5Hashcode(byte[] bytes, 
-                                          int start, int length ) throws DigestException {
+                                          int start, int length) throws DigestException {
       DIGESTER.update(bytes, start, length);
       DIGESTER.digest(digest, 0, MD5_LEN);
       long hashcode=0;
@@ -295,7 +295,7 @@
     public synchronized boolean next(Writable key, Writable value)
       throws IOException {
       while (next(key)) {
-        if(filter.accept(key)) {
+        if (filter.accept(key)) {
           getCurrentValue(value);
           return true;
         }
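
As a usage note, the static setters in this file are how a job opts in to
filtered SequenceFile input. A short sketch, assuming the nested filter class
is named RegexFilter (only its setPattern/setFrequency signatures are visible
in these hunks) and that SequenceFileInputFilter can be installed as the
job's input format:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.SequenceFileInputFilter;

    public class FilteredInputSketch {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        // read input through the filtering record reader
        job.setInputFormat(SequenceFileInputFilter.class);
        // keep only records whose key matches the regex
        SequenceFileInputFilter.setFilterClass(job,
            SequenceFileInputFilter.RegexFilter.class);
        SequenceFileInputFilter.RegexFilter.setPattern(job, ".*\\.html");
      }
    }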

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java Thu Apr 19 14:34:41 2007
@@ -38,7 +38,7 @@
 
   public RecordWriter getRecordWriter(FileSystem ignored, JobConf job,
                                       String name, Progressable progress)
-                                      throws IOException {
+    throws IOException {
 
     Path file = new Path(job.getOutputPath(), name);
     FileSystem fs = file.getFileSystem(job);
@@ -51,15 +51,15 @@
       // find the right codec
       Class codecClass = getOutputCompressorClass(job, DefaultCodec.class);
       codec = (CompressionCodec) 
-                 ReflectionUtils.newInstance(codecClass, job);
+        ReflectionUtils.newInstance(codecClass, job);
     }
     final SequenceFile.Writer out = 
       SequenceFile.createWriter(fs, job, file,
-                              job.getOutputKeyClass(),
-                              job.getOutputValueClass(),
-                              compressionType,
-                              codec,
-                              progress);
+                                job.getOutputKeyClass(),
+                                job.getOutputValueClass(),
+                                compressionType,
+                                codec,
+                                progress);
 
     return new RecordWriter() {
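
For reference, the writer above takes its key/value classes, compression type,
and codec from the JobConf, so a job configures them up front. A brief sketch;
the setCompressOutput/setOutputCompressorClass calls are assumed to be the
OutputFormatBase counterparts of the getOutputCompressorClass call above:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.compress.DefaultCodec;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.OutputFormatBase;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;

    public class SeqFileOutputSketch {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        job.setOutputFormat(SequenceFileOutputFormat.class);
        job.setOutputKeyClass(Text.class);          // becomes the writer's key class
        job.setOutputValueClass(IntWritable.class); // and its value class
        job.setOutputPath(new Path("out"));         // part files land under this dir
        // compress output with the default codec (assumed setter names)
        OutputFormatBase.setCompressOutput(job, true);
        OutputFormatBase.setOutputCompressorClass(job, DefaultCodec.class);
      }
    }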
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java Thu Apr 19 14:34:41 2007
@@ -80,21 +80,21 @@
   }
   
   protected synchronized boolean next(Writable key)
-      throws IOException {
-      if (!more) return false;
-      long pos = in.getPosition();
-      boolean eof = in.next(key);
-      if (pos >= end && in.syncSeen()) {
-          more = false;
-      } else {
-          more = eof;
-      }
-      return more;
+    throws IOException {
+    if (!more) return false;
+    long pos = in.getPosition();
+    boolean eof = in.next(key);
+    if (pos >= end && in.syncSeen()) {
+      more = false;
+    } else {
+      more = eof;
+    }
+    return more;
   }
   
   protected synchronized void getCurrentValue(Writable value)
-      throws IOException {
-      in.getCurrentValue(value);
+    throws IOException {
+    in.getCurrentValue(value);
   }
   
   /**
@@ -114,7 +114,7 @@
   }
   
   protected synchronized void seek(long pos) throws IOException {
-      in.seek(pos);
+    in.seek(pos);
   }
   public synchronized void close() throws IOException { in.close(); }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java Thu Apr 19 14:34:41 2007
@@ -72,7 +72,7 @@
 
     // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
     String logDir = System.getProperty("hadoop.log.dir");
-    if( logDir != null ) {
+    if (logDir != null) {
       HttpContext logContext = new HttpContext();
       logContext.setContextPath("/logs/*");
       logContext.setResourceBase(logDir);
@@ -101,7 +101,7 @@
    * @param value The value of the attribute
    */
   public void setAttribute(String name, Object value) {
-    webAppContext.setAttribute(name,value);
+    webAppContext.setAttribute(name, value);
   }
 
   /**
@@ -153,7 +153,7 @@
    */
   private static String getWebAppsPath() throws IOException {
     URL url = StatusHttpServer.class.getClassLoader().getResource("webapps");
-    if( url == null ) 
+    if (url == null) 
       throw new IOException("webapps not found in CLASSPATH"); 
     return url.toString();
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java Thu Apr 19 14:34:41 2007
@@ -56,9 +56,9 @@
   private String jobFile;                         // job configuration file
   private String taskId;                          // unique, includes job id
   private String jobId;                           // unique jobid
-  private String tipId ;
+  private String tipId;
   private int partition;                          // id within job
-  private TaskStatus.Phase phase ;                // current phase of the task 
+  private TaskStatus.Phase phase;                 // current phase of the task 
   
 
   ////////////////////////////////////////////
@@ -82,7 +82,7 @@
   public void setJobFile(String jobFile) { this.jobFile = jobFile; }
   public String getJobFile() { return jobFile; }
   public String getTaskId() { return taskId; }
+  public String getTipId() { return tipId; }
+  public String getTipId(){ return tipId; }
   public Counters getCounters() { return counters; }
   
   /**
@@ -105,14 +105,14 @@
   * @return current phase of the task
    */
   public TaskStatus.Phase getPhase(){
-    return this.phase ; 
+    return this.phase; 
   }
   /**
    * Set current phase of the task. 
   * @param p current phase of the task
    */
   protected void setPhase(TaskStatus.Phase p){
-    this.phase = p ; 
+    this.phase = p; 
   }
 
   ////////////////////////////////////////////
@@ -142,7 +142,7 @@
   public void localizeConfiguration(JobConf conf) throws IOException {
     conf.set("mapred.tip.id", tipId); 
     conf.set("mapred.task.id", taskId);
-    conf.setBoolean("mapred.task.is.map",isMapTask());
+    conf.setBoolean("mapred.task.is.map", isMapTask());
     conf.setInt("mapred.task.partition", partition);
     conf.set("mapred.job.id", jobId);
   }
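
The keys set in localizeConfiguration() are visible to user code through the
localized JobConf, so a task can inspect its own identity. A small sketch that
reads back exactly the keys written above (the class name is illustrative):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapReduceBase;

    public class TaskIdentitySketch extends MapReduceBase {
      public void configure(JobConf job) {
        String tip    = job.get("mapred.tip.id");
        String task   = job.get("mapred.task.id");
        String jobId  = job.get("mapred.job.id");
        boolean isMap = job.getBoolean("mapred.task.is.map", false);
        int partition = job.getInt("mapred.task.partition", -1);
        System.out.println(task + " (tip " + tip + ", job " + jobId + ") is a "
            + (isMap ? "map" : "reduce") + " task, partition " + partition);
      }
    }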

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskCompletionEvent.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskCompletionEvent.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskCompletionEvent.java Thu Apr 19 14:34:41 2007
@@ -14,11 +14,11 @@
 public class TaskCompletionEvent implements Writable {
   public static enum Status {FAILED, SUCCEEDED, OBSOLETE};
     
-  private int eventId ; 
-  private String taskTrackerHttp ;
-  private String taskId ;
-  Status status ; 
-  boolean isMap = false ;
+  private int eventId; 
+  private String taskTrackerHttp;
+  private String taskId;
+  Status status; 
+  boolean isMap = false;
   private int idWithinJob;
   public static final TaskCompletionEvent[] EMPTY_ARRAY = 
     new TaskCompletionEvent[0];
@@ -43,12 +43,12 @@
                              Status status, 
                              String taskTrackerHttp){
       
-    this.taskId = taskId ;
-    this.idWithinJob = idWithinJob ;
-    this.isMap = isMap ;
-    this.eventId = eventId ; 
-    this.status =status ; 
-    this.taskTrackerHttp = taskTrackerHttp ;
+    this.taskId = taskId;
+    this.idWithinJob = idWithinJob;
+    this.isMap = isMap;
+    this.eventId = eventId; 
+    this.status = status;
+    this.taskTrackerHttp = taskTrackerHttp;
   }
   /**
    * Returns event Id. 
@@ -114,9 +114,9 @@
   public String toString(){
     StringBuffer buf = new StringBuffer(); 
     buf.append("Task Id : "); 
-    buf.append( taskId ) ; 
+    buf.append(taskId); 
     buf.append(", Status : ");  
-    buf.append( status.name() ) ;
+    buf.append(status.name());
     return buf.toString();
   }
     
@@ -139,7 +139,7 @@
   }
   
   public void readFields(DataInput in) throws IOException {
-    this.taskId = WritableUtils.readString(in) ; 
+    this.taskId = WritableUtils.readString(in); 
     this.idWithinJob = WritableUtils.readVInt(in);
     this.isMap = in.readBoolean();
     this.status = WritableUtils.readEnum(in, Status.class);
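
Since TaskCompletionEvent is a Writable, it travels over RPC via
write/readFields. A round-trip sketch, with the constructor argument order
inferred from the field assignments above and the no-argument constructor
assumed (Writable implementations conventionally provide one):

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.mapred.TaskCompletionEvent;

    public class EventRoundTripSketch {
      public static void main(String[] args) throws Exception {
        TaskCompletionEvent event = new TaskCompletionEvent(
            1,                                    // eventId
            "task_0001_m_000000_0",               // taskId (format illustrative)
            0,                                    // idWithinJob
            true,                                 // isMap
            TaskCompletionEvent.Status.SUCCEEDED,
            "http://tracker:50060");              // taskTrackerHttp
        DataOutputBuffer out = new DataOutputBuffer();
        event.write(out);                         // serialize
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        TaskCompletionEvent copy = new TaskCompletionEvent();
        copy.readFields(in);                      // rebuild from the same bytes
      }
    }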

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Thu Apr 19 14:34:41 2007
@@ -75,8 +75,8 @@
   private double progress = 0;
   private String state = "";
   private long startTime = 0;
-  private long execStartTime = 0 ;
-  private long execFinishTime = 0 ;
+  private long execStartTime = 0;
+  private long execFinishTime = 0;
   private int completes = 0;
   private boolean failed = false;
   private boolean killed = false;
@@ -269,12 +269,12 @@
     // then true
     TaskStatus ts = (TaskStatus) taskStatuses.get(taskid);
     if ((ts != null) &&
-        (! tasksReportedClosed.contains(taskid)) &&
+        (!tasksReportedClosed.contains(taskid)) &&
         (job.getStatus().getRunState() != JobStatus.RUNNING)) {
       tasksReportedClosed.add(taskid);
       return true;
-    } else if( !isMapTask() && isComplete() && 
-               ! tasksReportedClosed.contains(taskid) ){
+    } else if (!isMapTask() && isComplete() && 
+               !tasksReportedClosed.contains(taskid)){
       tasksReportedClosed.add(taskid);
       return true; 
     }
@@ -298,7 +298,7 @@
        diagnostics.toArray(new String[diagnostics.size()]),
        execStartTime, execFinishTime, counters);
       
-    return report ;
+    return report;
   }
 
   /**
@@ -383,7 +383,7 @@
     if (status != null) {
       status.setRunState(TaskStatus.State.FAILED);
       // tasktracker went down and failed time was not reported. 
-      if( 0 == status.getFinishTime() ){
+      if (0 == status.getFinishTime()){
         status.setFinishTime(System.currentTimeMillis());
       }
     }
@@ -487,7 +487,7 @@
       double bestProgress = 0;
       String bestState = "";
       Counters bestCounters = new Counters();
-      for (Iterator it = taskStatuses.keySet().iterator(); it.hasNext(); ) {
+      for (Iterator it = taskStatuses.keySet().iterator(); it.hasNext();) {
         String taskid = (String) it.next();
         TaskStatus status = taskStatuses.get(taskid);
         if (status.getRunState() == TaskStatus.State.SUCCEEDED) {
@@ -533,7 +533,7 @@
     // in more depth eventually...
     //
       
-    if( activeTasks.size() <= MAX_TASK_EXECS &&
+    if (activeTasks.size() <= MAX_TASK_EXECS &&
         runSpeculative &&
         (averageProgress - progress >= SPECULATIVE_GAP) &&
         (System.currentTimeMillis() - startTime >= SPECULATIVE_LAG) 
@@ -548,7 +548,7 @@
    */
   public Task getTaskToRun(String taskTracker) throws IOException {
     Task t = null;
-    if( 0 == execStartTime ){
+    if (0 == execStartTime){
       // assume task starts running now
       execStartTime = System.currentTimeMillis();
     }
@@ -597,7 +597,7 @@
    */
   public boolean hasRunOnMachine(String tracker){
     return this.activeTasks.values().contains(tracker) || 
-      hasFailedOnMachine(tracker) ;
+      hasFailedOnMachine(tracker);
   }
   /**
    * Get the number of machines where this task has failed.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLogAppender.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLogAppender.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLogAppender.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLogAppender.java Thu Apr 19 14:34:41 2007
@@ -42,7 +42,7 @@
 
     if (this.layout == null) {
       errorHandler.error("No layout for appender " + name , 
-                         null, ErrorCode.MISSING_LAYOUT );
+                         null, ErrorCode.MISSING_LAYOUT);
     }
     
     // Log the message to the task's log

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskReport.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskReport.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskReport.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskReport.java Thu Apr 19 14:34:41 2007
@@ -31,7 +31,7 @@
   private float progress;
   private String state;
   private String[] diagnostics;
-  private long startTime ; 
+  private long startTime; 
   private long finishTime; 
   private Counters counters;
 
@@ -44,8 +44,8 @@
     this.progress = progress;
     this.state = state;
     this.diagnostics = diagnostics;
-    this.startTime = startTime ; 
-    this.finishTime = finishTime ;
+    this.startTime = startTime; 
+    this.finishTime = finishTime;
     this.counters = counters;
   }
     
@@ -108,7 +108,7 @@
     this.progress = in.readFloat();
     this.state = Text.readString(in);
     this.startTime = in.readLong(); 
-    this.finishTime = in.readLong() ;
+    this.finishTime = in.readLong();
     
     diagnostics = WritableUtils.readStringArray(in);
     counters = new Counters();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Thu Apr 19 14:34:41 2007
@@ -140,7 +140,7 @@
         LOG.warn(StringUtils.stringifyException(ie));
       }
       
-      if (! prepare()) {
+      if (!prepare()) {
         return;
       }
 
@@ -182,8 +182,8 @@
         if (localArchives != null){
           for (int i=0;i<archives.length;i++){
             for(int j=0;j<archiveClasspaths.length;j++){
-              if(archives[i].getPath().equals(
-                                              archiveClasspaths[j].toString())){
+              if (archives[i].getPath().equals(
+                                               archiveClasspaths[j].toString())){
                 classPath.append(sep);
                 classPath.append(localArchives[i]
                                  .toString());
@@ -194,7 +194,7 @@
       }
       //file paths
       Path[] fileClasspaths = DistributedCache.getFileClassPaths(conf);
-      if(fileClasspaths!=null && files != null) {
+      if (fileClasspaths!=null && files != null) {
         Path[] localFiles = DistributedCache
           .getLocalCacheFiles(conf);
         if (localFiles != null) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java Thu Apr 19 14:34:41 2007
@@ -45,12 +45,12 @@
   private String stateString;
   private String taskTracker;
     
-  private long startTime ; 
-  private long finishTime ; 
+  private long startTime; 
+  private long finishTime; 
     
   // only for reduce tasks
-  private long shuffleFinishTime ; 
-  private long sortFinishTime ; 
+  private long shuffleFinishTime; 
+  private long sortFinishTime; 
     
   private Phase phase = Phase.STARTING; 
   private Counters counters;
@@ -68,7 +68,7 @@
     this.diagnosticInfo = diagnosticInfo;
     this.stateString = stateString;
     this.taskTracker = taskTracker;
-    this.phase = phase ;
+    this.phase = phase;
     this.counters = counters;
   }
     
@@ -101,11 +101,11 @@
    * @param finishTime finish time of task.
    */
   void setFinishTime(long finishTime) {
-    if( shuffleFinishTime == 0 ) {
-      this.shuffleFinishTime = finishTime ; 
+    if (shuffleFinishTime == 0) {
+      this.shuffleFinishTime = finishTime; 
     }
-    if( sortFinishTime == 0 ){
-      this.sortFinishTime = finishTime ;
+    if (sortFinishTime == 0){
+      this.sortFinishTime = finishTime;
     }
     this.finishTime = finishTime;
   }
@@ -147,8 +147,8 @@
    */
   void setSortFinishTime(long sortFinishTime) {
     this.sortFinishTime = sortFinishTime;
-    if( 0 == this.shuffleFinishTime){
-      this.shuffleFinishTime = sortFinishTime ;
+    if (0 == this.shuffleFinishTime){
+      this.shuffleFinishTime = sortFinishTime;
     }
   }
 
@@ -180,7 +180,7 @@
   * @param p current phase of this task
    */
   void setPhase(Phase p){
-    this.phase = p ; 
+    this.phase = p; 
   }
   /**
    * Get task's counters.
@@ -209,7 +209,7 @@
     WritableUtils.writeEnum(out, phase);
     out.writeLong(startTime);
     out.writeLong(finishTime);
-    if(! isMap){
+    if (!isMap){
       out.writeLong(shuffleFinishTime);
       out.writeLong(sortFinishTime);
     }
@@ -225,8 +225,8 @@
     this.stateString = UTF8.readString(in);
     this.phase = WritableUtils.readEnum(in, Phase.class); 
     this.startTime = in.readLong(); 
-    this.finishTime = in.readLong() ; 
-    if( ! this.isMap ){
+    this.finishTime = in.readLong(); 
+    if (!this.isMap){
       shuffleFinishTime = in.readLong(); 
       sortFinishTime = in.readLong(); 
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Thu Apr 19 14:34:41 2007
@@ -1003,7 +1003,7 @@
    */
   private TaskInProgress findTaskToKill() {
     TaskInProgress killMe = null;
-    for (Iterator it = runningTasks.values().iterator(); it.hasNext(); ) {
+    for (Iterator it = runningTasks.values().iterator(); it.hasNext();) {
       TaskInProgress tip = (TaskInProgress) it.next();
       if ((tip.getRunState() == TaskStatus.State.RUNNING) &&
           !tip.wasKilled) {
@@ -1116,7 +1116,7 @@
         boolean staleState = false;
         try {
           // This while-loop attempts reconnects if we get network errors
-          while (running && ! staleState && !shuttingDown && !denied) {
+          while (running && !staleState && !shuttingDown && !denied) {
             try {
               State osState = offerService();
               if (osState == State.STALE) {
@@ -1170,7 +1170,7 @@
     private JobConf localJobConf;
     private boolean keepFailedTaskFiles;
     private boolean alwaysKeepTaskFiles;
-    private TaskStatus taskStatus ; 
+    private TaskStatus taskStatus; 
     private boolean keepJobFiles;
     private long taskTimeout;
         
@@ -1285,12 +1285,12 @@
       this.progress = p;
       this.runstate = TaskStatus.State.RUNNING;
       this.lastProgressReport = System.currentTimeMillis();
-      TaskStatus.Phase oldPhase = taskStatus.getPhase() ;
-      if( oldPhase != newPhase ){
+      TaskStatus.Phase oldPhase = taskStatus.getPhase();
+      if (oldPhase != newPhase){
         // sort phase started
-        if( newPhase == TaskStatus.Phase.SORT ){
+        if (newPhase == TaskStatus.Phase.SORT){
           this.taskStatus.setShuffleFinishTime(System.currentTimeMillis());
-        }else if( newPhase == TaskStatus.Phase.REDUCE){
+        }else if (newPhase == TaskStatus.Phase.REDUCE){
           this.taskStatus.setSortFinishTime(System.currentTimeMillis());
         }
         this.taskStatus.setPhase(newPhase);
@@ -1347,7 +1347,7 @@
       // Wait until task reports as done.  If it hasn't reported in,
       // wait for a second and try again.
       //
-      while (! done && (System.currentTimeMillis() - start < WAIT_FOR_DONE)) {
+      while (!done && (System.currentTimeMillis() - start < WAIT_FOR_DONE)) {
         try {
           Thread.sleep(1000);
         } catch (InterruptedException ie) {
@@ -1414,11 +1414,11 @@
           String systemDir = task.getConf().get("mapred.system.dir");
           Path taskTempDir = new Path(systemDir + "/" + 
                                       task.getJobId() + "/" + task.getTipId() + "/" + task.getTaskId());
-          if( fs.exists(taskTempDir)){
-            fs.delete(taskTempDir) ;
+          if (fs.exists(taskTempDir)){
+            fs.delete(taskTempDir);
           }
         }catch(IOException e){
-          LOG.warn("Error in deleting reduce temporary output",e); 
+          LOG.warn("Error in deleting reduce temporary output", e); 
         }
       }
     }
@@ -1803,24 +1803,24 @@
   * @throws DiskErrorException if no local directory is writable
    * @author hairong
    */
-  private static void checkLocalDirs( String[] localDirs ) 
+  private static void checkLocalDirs(String[] localDirs) 
     throws DiskErrorException {
     boolean writable = false;
         
-    if( localDirs != null ) {
+    if (localDirs != null) {
       for (int i = 0; i < localDirs.length; i++) {
         try {
-          DiskChecker.checkDir( new File(localDirs[i]) );
+          DiskChecker.checkDir(new File(localDirs[i]));
           writable = true;
-        } catch( DiskErrorException e ) {
-          LOG.warn("Task Tracker local " + e.getMessage() );
+        } catch(DiskErrorException e) {
+          LOG.warn("Task Tracker local " + e.getMessage());
         }
       }
     }
 
-    if( !writable )
-      throw new DiskErrorException( 
-                                   "all local directories are not writable" );
+    if (!writable)
+      throw new DiskErrorException(
+                                   "all local directories are not writable");
   }
     
   /**
@@ -1845,9 +1845,9 @@
       ReflectionUtils.setContentionTracing
         (conf.getBoolean("tasktracker.contention.tracking", false));
       new TaskTracker(conf).run();
-    } catch ( Throwable e ) {
-      LOG.error( "Can not start task tracker because "+
-                 StringUtils.stringifyException(e) );
+    } catch (Throwable e) {
+      LOG.error("Can not start task tracker because "+
+                StringUtils.stringifyException(e));
       System.exit(-1);
     }
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerAction.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerAction.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerAction.java Thu Apr 19 14:34:41 2007
@@ -62,26 +62,26 @@
     TaskTrackerAction action = null;
     
     switch (actionType) {
-      case LAUNCH_TASK:
-        {
-          action = new LaunchTaskAction();
-        }
-        break;
-        case KILL_TASK:
-        {
-          action = new KillTaskAction();
-        }
-        break;
-      case KILL_JOB:
-        {
-          action = new KillJobAction();
-        }
-        break;
-      case REINIT_TRACKER:
-        {
-          action = new ReinitTrackerAction();
-        }
-        break;
+    case LAUNCH_TASK:
+      {
+        action = new LaunchTaskAction();
+      }
+      break;
+    case KILL_TASK:
+      {
+        action = new KillTaskAction();
+      }
+      break;
+    case KILL_JOB:
+      {
+        action = new KillJobAction();
+      }
+      break;
+    case REINIT_TRACKER:
+      {
+        action = new ReinitTrackerAction();
+      }
+      break;
     }
 
     return action;
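
The switch above is one half of a Writable factory: the tracker reads the
action type from the stream, asks this method for an empty instance, and then
fills its fields with readFields. A sketch of that pattern, assuming the
factory is the static createAction(ActionType) method this hunk appears to
sit in and that actions follow the usual Writable convention (if these
classes are package-private, the sketch would have to live in
org.apache.hadoop.mapred):

    import java.io.DataInput;
    import java.io.IOException;
    import org.apache.hadoop.io.WritableUtils;
    import org.apache.hadoop.mapred.TaskTrackerAction;

    public class ActionReaderSketch {
      static TaskTrackerAction read(DataInput in) throws IOException {
        TaskTrackerAction.ActionType type =
            WritableUtils.readEnum(in, TaskTrackerAction.ActionType.class);
        TaskTrackerAction action = TaskTrackerAction.createAction(type);
        action.readFields(in);   // fill in the action-specific fields
        return action;
      }
    }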


