hadoop-common-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r530556 [8/12] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/hbase/src/java/org/apache/hadoop/hbase/ src/contrib/hbase/src/test/org/apache/hadoop/hbase/ src/contrib/streaming/src/java/org/ap...
Date Thu, 19 Apr 2007 21:34:53 GMT
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3InputStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3InputStream.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3InputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3InputStream.java Thu Apr 19 14:34:41 2007
@@ -37,65 +37,65 @@
   }
 
   @Override
-    public synchronized long getPos() throws IOException {
-      return pos;
-    }
+  public synchronized long getPos() throws IOException {
+    return pos;
+  }
 
   @Override
-    public synchronized int available() throws IOException {
-      return (int) (fileLength - pos);
-    }
+  public synchronized int available() throws IOException {
+    return (int) (fileLength - pos);
+  }
 
   @Override
-    public synchronized void seek(long targetPos) throws IOException {
-      if (targetPos > fileLength) {
-        throw new IOException("Cannot seek after EOF");
-      }
-      pos = targetPos;
-      blockEnd = -1;
+  public synchronized void seek(long targetPos) throws IOException {
+    if (targetPos > fileLength) {
+      throw new IOException("Cannot seek after EOF");
     }
+    pos = targetPos;
+    blockEnd = -1;
+  }
 
   @Override
-    public synchronized boolean seekToNewSource(long targetPos) throws IOException {
-      return false;
-    }
+  public synchronized boolean seekToNewSource(long targetPos) throws IOException {
+    return false;
+  }
 
   @Override
-    public synchronized int read() throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
+  public synchronized int read() throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
+    int result = -1;
+    if (pos < fileLength) {
+      if (pos > blockEnd) {
+        blockSeekTo(pos);
       }
-      int result = -1;
-      if (pos < fileLength) {
-        if (pos > blockEnd) {
-          blockSeekTo(pos);
-        }
-        result = blockStream.read();
-        if (result >= 0) {
-          pos++;
-        }
+      result = blockStream.read();
+      if (result >= 0) {
+        pos++;
       }
-      return result;
     }
+    return result;
+  }
 
   @Override
-    public synchronized int read(byte buf[], int off, int len) throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
-      }
-      if (pos < fileLength) {
-        if (pos > blockEnd) {
-          blockSeekTo(pos);
-        }
-        int realLen = Math.min(len, (int) (blockEnd - pos + 1));
-        int result = blockStream.read(buf, off, realLen);
-        if (result >= 0) {
-          pos += result;
-        }
-        return result;
+  public synchronized int read(byte buf[], int off, int len) throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
+    if (pos < fileLength) {
+      if (pos > blockEnd) {
+        blockSeekTo(pos);
+      }
+      int realLen = Math.min(len, (int) (blockEnd - pos + 1));
+      int result = blockStream.read(buf, off, realLen);
+      if (result >= 0) {
+        pos += result;
       }
-      return -1;
+      return result;
     }
+    return -1;
+  }
 
   private synchronized void blockSeekTo(long target) throws IOException {
     //
@@ -132,7 +132,7 @@
   }
 
   @Override
-    public void close() throws IOException {
+  public void close() throws IOException {
     if (closed) {
       throw new IOException("Stream closed");
     }
@@ -151,17 +151,17 @@
    * We don't support marks.
    */
   @Override
-    public boolean markSupported() {
+  public boolean markSupported() {
     return false;
   }
 
   @Override
-    public void mark(int readLimit) {
+  public void mark(int readLimit) {
     // Do nothing
   }
 
   @Override
-    public void reset() throws IOException {
+  public void reset() throws IOException {
     throw new IOException("Mark not supported");
   }
 

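The S3InputStream hunks above are whitespace-only: the method bodies move to a shallower indent with no behavioral change. For orientation, such a stream is normally driven through the FileSystem API roughly as below. This is a minimal sketch, assuming an S3 store configured as the default filesystem; the bucket name and file path are hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3SeekRead {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "s3://mybucket");   // hypothetical bucket
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream in = fs.open(new Path("/logs/part-00000")); // hypothetical
        in.seek(1024);           // S3InputStream.seek(): pos = 1024, blockEnd = -1
        byte[] buf = new byte[512];
        int n = in.read(buf, 0, buf.length);  // first read triggers blockSeekTo(pos)
        System.out.println("read " + n + " bytes, now at " + in.getPos());
        in.close();
      }
    }

Note the asymmetry visible in the hunks: seek() past fileLength fails immediately ("Cannot seek after EOF"), while a read at pos == fileLength simply returns -1.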
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3OutputStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3OutputStream.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3OutputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3OutputStream.java Thu Apr 19 14:34:41 2007
@@ -74,52 +74,52 @@
   }
 
   @Override
-    public synchronized void write(int b) throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
-      }
+  public synchronized void write(int b) throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
 
-      if ((bytesWrittenToBlock + pos == blockSize) || (pos >= bufferSize)) {
-        flush();
-      }
-      outBuf[pos++] = (byte) b;
-      filePos++;
+    if ((bytesWrittenToBlock + pos == blockSize) || (pos >= bufferSize)) {
+      flush();
     }
+    outBuf[pos++] = (byte) b;
+    filePos++;
+  }
 
   @Override
-    public synchronized void write(byte b[], int off, int len) throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
-      }
-      while (len > 0) {
-        int remaining = bufferSize - pos;
-        int toWrite = Math.min(remaining, len);
-        System.arraycopy(b, off, outBuf, pos, toWrite);
-        pos += toWrite;
-        off += toWrite;
-        len -= toWrite;
-        filePos += toWrite;
-
-        if ((bytesWrittenToBlock + pos >= blockSize) || (pos == bufferSize)) {
-          flush();
-        }
+  public synchronized void write(byte b[], int off, int len) throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
+    while (len > 0) {
+      int remaining = bufferSize - pos;
+      int toWrite = Math.min(remaining, len);
+      System.arraycopy(b, off, outBuf, pos, toWrite);
+      pos += toWrite;
+      off += toWrite;
+      len -= toWrite;
+      filePos += toWrite;
+
+      if ((bytesWrittenToBlock + pos >= blockSize) || (pos == bufferSize)) {
+        flush();
       }
     }
+  }
 
   @Override
-    public synchronized void flush() throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
-      }
+  public synchronized void flush() throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
 
-      if (bytesWrittenToBlock + pos >= blockSize) {
-        flushData((int) blockSize - bytesWrittenToBlock);
-      }
-      if (bytesWrittenToBlock == blockSize) {
-        endBlock();
-      }
-      flushData(pos);
+    if (bytesWrittenToBlock + pos >= blockSize) {
+      flushData((int) blockSize - bytesWrittenToBlock);
+    }
+    if (bytesWrittenToBlock == blockSize) {
+      endBlock();
     }
+    flushData(pos);
+  }
 
   private synchronized void flushData(int maxPos) throws IOException {
     int workingPos = Math.min(pos, maxPos);
@@ -179,22 +179,22 @@
   }
 
   @Override
-    public synchronized void close() throws IOException {
-      if (closed) {
-        throw new IOException("Stream closed");
-      }
+  public synchronized void close() throws IOException {
+    if (closed) {
+      throw new IOException("Stream closed");
+    }
 
-      flush();
-      if (filePos == 0 || bytesWrittenToBlock != 0) {
-        endBlock();
-      }
+    flush();
+    if (filePos == 0 || bytesWrittenToBlock != 0) {
+      endBlock();
+    }
 
-      backupStream.close();
-      backupFile.delete();
+    backupStream.close();
+    backupFile.delete();
 
-      super.close();
+    super.close();
 
-      closed = true;
-    }
+    closed = true;
+  }
 
 }
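The write path above buffers bytes in outBuf and flushes whenever the buffer fills (pos >= bufferSize) or the running total reaches a block boundary (bytesWrittenToBlock + pos == blockSize); close() then flushes the final partial block and deletes the local backup file. A minimal write-side sketch under the same hypothetical configuration as the read example:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3Write {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "s3://mybucket");    // hypothetical bucket
        FileSystem fs = FileSystem.get(conf);
        FSDataOutputStream out = fs.create(new Path("/logs/out.dat")); // hypothetical
        byte[] record = "hello".getBytes("UTF-8");
        out.write(record, 0, record.length);  // buffered; flushed at block boundaries
        out.close();  // flush() + endBlock() for the trailing partial block,
                      // then backupStream.close() and backupFile.delete()
      }
    }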

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java Thu Apr 19 14:34:41 2007
@@ -197,7 +197,7 @@
                        byte[] b2, int s2, int l2) {
       int size1 = readInt(b1, s1);
       int size2 = readInt(b2, s2);
-      return compareBytes(b1,s1+4, size1, b2, s2+4, size2);
+      return compareBytes(b1, s1+4, size1, b2, s2+4, size2);
     }
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/GenericWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/GenericWritable.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/GenericWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/GenericWritable.java Thu Apr 19 14:34:41 2007
@@ -70,7 +70,7 @@
       }
     }
     throw new RuntimeException("The type of instance is: "
-                + instance.getClass() + ", which is NOT registered.");
+                               + instance.getClass() + ", which is NOT registered.");
   }
 
   /**

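The RuntimeException in the GenericWritable hunk fires when set() is handed an instance whose class the subclass never registered. A minimal sketch of the registration contract; the subclass name and the two payload types are arbitrary choices for illustration:

    import org.apache.hadoop.io.GenericWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    /** Carries either a Text or an IntWritable; anything else passed to
     *  set() raises the "NOT registered" RuntimeException shown above. */
    public class TextOrInt extends GenericWritable {
      private static final Class[] CLASSES = { Text.class, IntWritable.class };

      protected Class[] getTypes() {
        return CLASSES;
      }
    }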
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Thu Apr 19 14:34:41 2007
@@ -81,7 +81,7 @@
                   Class keyClass, Class valClass,
                   CompressionType compress, Progressable progress)
       throws IOException {
-      this(conf,fs,dirName,WritableComparator.get(keyClass),valClass,
+      this(conf, fs, dirName, WritableComparator.get(keyClass), valClass,
            compress, progress);
     }
 
@@ -89,7 +89,7 @@
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class keyClass, Class valClass, CompressionType compress)
       throws IOException {
-      this(conf,fs,dirName,WritableComparator.get(keyClass),valClass,compress);
+      this(conf, fs, dirName, WritableComparator.get(keyClass), valClass, compress);
     }
 
     /** Create the named map using the named key comparator. */
@@ -101,8 +101,8 @@
     }
     /** Create the named map using the named key comparator. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
-                 WritableComparator comparator, Class valClass,
-                 SequenceFile.CompressionType compress)
+                  WritableComparator comparator, Class valClass,
+                  SequenceFile.CompressionType compress)
       throws IOException {
       this(conf, fs, dirName, comparator, valClass, compress, null);
     }
@@ -118,7 +118,7 @@
 
       Path dir = new Path(dirName);
       if (!fs.mkdirs(dir)) {
-          throw new IOException("Mkdirs failed to create directory " + dir.toString());
+        throw new IOException("Mkdirs failed to create directory " + dir.toString());
       }
       Path dataFile = new Path(dir, DATA_FILE_NAME);
       Path indexFile = new Path(dir, INDEX_FILE_NAME);
@@ -126,7 +126,7 @@
       Class keyClass = comparator.getKeyClass();
       this.data =
         SequenceFile.createWriter
-        (fs,conf,dataFile,keyClass,valClass,compress,progress);
+        (fs, conf, dataFile, keyClass, valClass, compress, progress);
       this.index =
         SequenceFile.createWriter
         (fs, conf, indexFile, keyClass, LongWritable.class,
@@ -182,8 +182,8 @@
   public static class Reader {
       
     /** Number of index entries to skip between each entry.  Zero by default.
-    * Setting this to values larger than zero can facilitate opening large map
-    * files using less memory. */
+     * Setting this to values larger than zero can facilitate opening large map
+     * files using less memory. */
     private int INDEX_SKIP = 0;
       
     private WritableComparator comparator;
@@ -286,7 +286,7 @@
         }
       } catch (EOFException e) {
         SequenceFile.LOG.warn("Unexpected EOF reading " + index +
-                                 " at entry #" + count + ".  Ignoring.");
+                              " at entry #" + count + ".  Ignoring.");
       } finally {
 	indexClosed = true;
         index.close();
@@ -306,7 +306,7 @@
 
       readIndex();
       int pos = ((count - 1) / 2);              // middle of the index
-      if(pos < 0) {
+      if (pos < 0) {
         throw new IOException("MapFile empty");
       }
       
@@ -357,7 +357,7 @@
 
       if (seekIndex != -1                         // seeked before
           && seekIndex+1 < count           
-          && comparator.compare(key,keys[seekIndex+1])<0 // before next indexed
+          && comparator.compare(key, keys[seekIndex+1])<0 // before next indexed
           && comparator.compare(key, nextKey)
           >= 0) {                                 // but after last seeked
         // do nothing
@@ -431,9 +431,9 @@
      * @return          - returns the key that was the closest match or null if eof.
      */
     public synchronized WritableComparable getClosest(WritableComparable key, Writable val)
-        throws IOException {
+      throws IOException {
       
-      if(seekInternal(key) > 0) {
+      if (seekInternal(key) > 0) {
         return null;
       }
       data.getCurrentValue(val);
@@ -442,7 +442,7 @@
 
     /** Close the map. */
     public synchronized void close() throws IOException {
-      if (! indexClosed) {
+      if (!indexClosed) {
 	index.close();
       }
       data.close();
@@ -482,7 +482,7 @@
    * @throws Exception
    */
   public static long fix(FileSystem fs, Path dir,
-          Class keyClass, Class valueClass, boolean dryrun, Configuration conf) throws Exception {
+                         Class keyClass, Class valueClass, boolean dryrun, Configuration conf) throws Exception {
     String dr = (dryrun ? "[DRY RUN ] " : "");
     Path data = new Path(dir, DATA_FILE_NAME);
     Path index = new Path(dir, INDEX_FILE_NAME);
@@ -498,11 +498,11 @@
     SequenceFile.Reader dataReader = new SequenceFile.Reader(fs, data, conf);
     if (!dataReader.getKeyClass().equals(keyClass)) {
       throw new Exception(dr + "Wrong key class in " + dir + ", expected" + keyClass.getName() +
-              ", got " + dataReader.getKeyClass().getName());
+                          ", got " + dataReader.getKeyClass().getName());
     }
     if (!dataReader.getValueClass().equals(valueClass)) {
       throw new Exception(dr + "Wrong value class in " + dir + ", expected" + valueClass.getName() +
-              ", got " + dataReader.getValueClass().getName());
+                          ", got " + dataReader.getValueClass().getName());
     }
     long cnt = 0L;
     Writable key = (Writable)ReflectionUtils.newInstance(keyClass, conf);
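For reference, the Writer/Reader pair being re-indented above is used roughly as follows; the paths are hypothetical, and keys must be appended in sorted order:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class MapFileDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Writing creates the <dir>/data and <dir>/index SequenceFiles
        // that fix() above knows how to repair.
        MapFile.Writer writer =
          new MapFile.Writer(conf, fs, "/tmp/demo.map", Text.class, Text.class);
        writer.append(new Text("a"), new Text("1"));  // sorted key order required
        writer.append(new Text("b"), new Text("2"));
        writer.close();
        // Reading binary-searches the in-memory index, then scans the data file.
        MapFile.Reader reader = new MapFile.Reader(fs, "/tmp/demo.map", conf);
        Text val = new Text();
        reader.get(new Text("b"), val);               // positioned lookup
        System.out.println(val);                      // prints 2
        reader.close();
      }
    }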

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Thu Apr 19 14:34:41 2007
@@ -147,8 +147,8 @@
       } else {
         throw new IllegalArgumentException("Not a primitive: "+declaredClass);
       }
-    } else if (declaredClass.isEnum() ) {         // enum
-      UTF8.writeString( out, ((Enum)instance).name() );
+    } else if (declaredClass.isEnum()) {         // enum
+      UTF8.writeString(out, ((Enum)instance).name());
     } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
       UTF8.writeString(out, instance.getClass().getName());
       ((Writable)instance).write(out);
@@ -169,7 +169,7 @@
   /** Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding. */
   @SuppressWarnings("unchecked")
-    public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
+  public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
     throws IOException {
     String className = UTF8.readString(in);
     Class<?> declaredClass = PRIMITIVE_NAMES.get(className);
@@ -216,8 +216,8 @@
       
     } else if (declaredClass == String.class) {        // String
       instance = UTF8.readString(in);
-    } else if( declaredClass.isEnum() ) {         // enum
-      instance = Enum.valueOf( (Class<? extends Enum>) declaredClass, UTF8.readString(in) );
+    } else if (declaredClass.isEnum()) {         // enum
+      instance = Enum.valueOf((Class<? extends Enum>) declaredClass, UTF8.readString(in));
     } else {                                      // Writable
       Class instanceClass = null;
       try {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Thu Apr 19 14:34:41 2007
@@ -109,7 +109,7 @@
     createWriter(FileSystem fs, Configuration conf, Path name, 
                  Class keyClass, Class valClass) 
     throws IOException {
-    return createWriter(fs,conf,name,keyClass,valClass,
+    return createWriter(fs, conf, name, keyClass, valClass,
                         getCompressionType(conf));
   }
   
@@ -679,7 +679,7 @@
       out.writeBoolean(this.isCompressed());
       out.writeBoolean(this.isBlockCompressed());
       
-      if(this.isCompressed()) {
+      if (this.isCompressed()) {
         Text.writeString(out, (codec.getClass()).getName());
       }
       this.metadata.write(out);
@@ -698,7 +698,7 @@
       this.compress = compress;
       this.codec = codec;
       this.metadata = metadata;
-      if(this.codec != null) {
+      if (this.codec != null) {
         ReflectionUtils.setConf(this.codec, this.conf);
         this.deflateFilter = this.codec.createOutputStream(buffer);
         this.deflateOut = 
@@ -2351,7 +2351,7 @@
         rawKey.reset();
         rawKey.write(ms.getKey().getData(), 0, ms.getKey().getLength());
         //load the raw value. Re-use the existing rawValue buffer
-        if(rawValue == null)
+        if (rawValue == null)
           rawValue = ms.in.createValueBytes();
         int valLength = ms.nextRawValue(rawValue);
 

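The createWriter overloads above delegate to one another, defaulting the compression type from the config via getCompressionType(conf). A minimal round-trip sketch with a hypothetical path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SeqFileDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path file = new Path("/tmp/demo.seq");        // hypothetical path
        SequenceFile.Writer writer =
          SequenceFile.createWriter(fs, conf, file, Text.class, IntWritable.class);
        writer.append(new Text("k"), new IntWritable(42));
        writer.close();
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
        Text key = new Text();
        IntWritable val = new IntWritable();
        while (reader.next(key, val)) {               // false at end of file
          System.out.println(key + "\t" + val);
        }
        reader.close();
      }
    }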
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java Thu Apr 19 14:34:41 2007
@@ -202,7 +202,7 @@
    * increased to match. The existing contents of the buffer
    * (if any) are deleted.
    */
-  private void setCapacity( int len ) {
+  private void setCapacity(int len) {
     if (bytes == null || bytes.length < len)
       bytes = new byte[len];      
   }
@@ -246,7 +246,7 @@
   /** Compare two Texts bytewise using standard UTF8 ordering. */
   public int compareTo(Object o) {
     Text that = (Text)o;
-    if(this == that)
+    if (this == that)
       return 0;
     else
       return WritableComparator.compareBytes(bytes, 0, length,

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java Thu Apr 19 14:34:41 2007
@@ -45,7 +45,7 @@
   public void readFields(DataInput in) throws IOException {
     byte version = in.readByte();                 // read version
     if (version != getVersion())
-      throw new VersionMismatchException(getVersion(),version);
+      throw new VersionMismatchException(getVersion(), version);
   }
 
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java Thu Apr 19 14:34:41 2007
@@ -175,21 +175,21 @@
    * @return deserialized long
    */
   public static long readVLong(byte[] bytes, int start) throws IOException {
-      int len = bytes[start];
-      if (len >= -112) {
-          return len;
-      }
-      boolean isNegative = (len < -120);
-      len = isNegative ? -(len + 120) : -(len + 112);
-      if (start+1+len>bytes.length)
-          throw new IOException(
-                  "Not enough number of bytes for a zero-compressed integer");
-      long i = 0;
-      for (int idx = 0; idx < len; idx++) {
-          i = i << 8;
-          i = i | (bytes[start+1+idx] & 0xFF);
-      }
-      return (isNegative ? (i ^ -1L) : i);
+    int len = bytes[start];
+    if (len >= -112) {
+      return len;
+    }
+    boolean isNegative = (len < -120);
+    len = isNegative ? -(len + 120) : -(len + 112);
+    if (start+1+len>bytes.length)
+      throw new IOException(
+                            "Not enough number of bytes for a zero-compressed integer");
+    long i = 0;
+    for (int idx = 0; idx < len; idx++) {
+      i = i << 8;
+      i = i | (bytes[start+1+idx] & 0xFF);
+    }
+    return (isNegative ? (i ^ -1L) : i);
   }
   
   /**
@@ -200,6 +200,6 @@
    * @return deserialized integer
    */
   public static int readVInt(byte[] bytes, int start) throws IOException {
-      return (int) readVLong(bytes, start);
+    return (int) readVLong(bytes, start);
   }
 }
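readVLong above decodes Hadoop's zero-compressed integer format: values in [-112, 127] occupy a single byte, and anything else starts with a marker byte giving sign and length (-113..-120 for positive values of one to eight bytes, -121..-128 for negative values, which are stored one's-complemented), followed by the magnitude big-endian. Below is a standalone encoder producing that layout, written to mirror the decoder shown in the hunk; it is a sketch, not the companion writer that lives in WritableUtils.

    /** Sketch of the zero-compressed encoding that readVLong() reverses. */
    public class VLongDemo {

      static byte[] encodeVLong(long i) {
        if (i >= -112 && i <= 127) {
          return new byte[] { (byte) i };         // single-byte fast path
        }
        int base = -112;
        if (i < 0) {
          i ^= -1L;                               // one's complement, as in decode
          base = -120;
        }
        long tmp = i;
        int len = 0;
        while (tmp != 0) { len++; tmp >>>= 8; }   // bytes needed for the magnitude
        byte[] out = new byte[len + 1];
        out[0] = (byte) (base - len);             // marker byte: -113..-128
        for (int idx = 0; idx < len; idx++) {     // big-endian magnitude
          out[1 + idx] = (byte) (i >>> (8 * (len - 1 - idx)));
        }
        return out;
      }

      public static void main(String[] args) throws Exception {
        byte[] b = encodeVLong(300);              // -> { -114, 1, 44 }
        System.out.println(
          org.apache.hadoop.io.WritableComparator.readVLong(b, 0)); // prints 300
      }
    }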

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Thu Apr 19 14:34:41 2007
@@ -37,8 +37,8 @@
     byte[] outbuf = new byte[length];
     ByteArrayOutputStream bos =  new ByteArrayOutputStream();
     int len;
-    while((len=gzi.read(outbuf,0,outbuf.length)) != -1){
-      bos.write(outbuf,0,len);
+    while((len=gzi.read(outbuf, 0, outbuf.length)) != -1){
+      bos.write(outbuf, 0, len);
     }
     byte[] decompressed =  bos.toByteArray();
     bos.close();
@@ -55,12 +55,12 @@
     if (bytes != null) {
       ByteArrayOutputStream bos =  new ByteArrayOutputStream();
       GZIPOutputStream gzout = new GZIPOutputStream(bos);
-      gzout.write(bytes,0,bytes.length);
+      gzout.write(bytes, 0, bytes.length);
       gzout.close();
       byte[] buffer = bos.toByteArray();
       int len = buffer.length;
       out.writeInt(len);
-      out.write(buffer,0,len);
+      out.write(buffer, 0, len);
       /* debug only! Once we have confidence, can lose this. */
       return ((bytes.length != 0) ? (100*buffer.length)/bytes.length : 0);
     } else {
@@ -94,7 +94,7 @@
       byte[] buffer = s.getBytes("UTF-8");
       int len = buffer.length;
       out.writeInt(len);
-      out.write(buffer,0,len);
+      out.write(buffer, 0, len);
     } else {
       out.writeInt(-1);
     }
@@ -183,7 +183,7 @@
    */
   public static void displayByteArray(byte[] record){
     int i;
-    for(i=0;i < record.length -1 ; i++){
+    for(i=0;i < record.length -1; i++){
       if (i % 16 == 0) { System.out.println(); }
       System.out.print(Integer.toHexString(record[i]  >> 4 & 0x0F));
       System.out.print(Integer.toHexString(record[i] & 0x0F));

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java Thu Apr 19 14:34:41 2007
@@ -44,7 +44,7 @@
    *                            algorithm with given bufferSize
    */
   public BlockCompressorStream(OutputStream out, Compressor compressor, 
-      int bufferSize, int compressionOverhead) {
+                               int bufferSize, int compressionOverhead) {
     super(out, compressor, bufferSize);
     MAX_INPUT_SIZE = bufferSize - compressionOverhead;
   }
@@ -70,7 +70,7 @@
     if (b == null) {
       throw new NullPointerException();
     } else if ((off < 0) || (off > b.length) || (len < 0) ||
-            ((off + len) > b.length)) {
+               ((off + len) > b.length)) {
       throw new IndexOutOfBoundsException();
     } else if (len == 0) {
       return;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java Thu Apr 19 14:34:41 2007
@@ -41,7 +41,7 @@
    * @param bufferSize size of buffer
    */
   public BlockDecompressorStream(InputStream in, Decompressor decompressor, 
-      int bufferSize) {
+                                 int bufferSize) {
     super(in, decompressor, bufferSize);
   }
   
@@ -123,7 +123,7 @@
     int b3 = in.read();
     int b4 = in.read();
     if ((b1 | b2 | b3 | b4) < 0)
-        throw new EOFException();
+      throw new EOFException();
     return ((b1 << 24) + (b2 << 16) + (b3 << 8) + (b4 << 0));
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java Thu Apr 19 14:34:41 2007
@@ -122,7 +122,7 @@
         buf.append(itr.next().getName());
       }
     }
-    conf.set("io.compression.codecs",buf.toString());   
+    conf.set("io.compression.codecs", buf.toString());   
   }
   
   /**

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/LzoCodec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/LzoCodec.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/LzoCodec.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/LzoCodec.java Thu Apr 19 14:34:41 2007
@@ -43,11 +43,11 @@
   private Configuration conf;
   
   public void setConf(Configuration conf) {
-	  this.conf = conf;
+    this.conf = conf;
   }
   
   public Configuration getConf() {
-	  return conf;
+    return conf;
   }
 
   private static boolean nativeLzoLoaded = false;
@@ -55,7 +55,7 @@
   static {
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       nativeLzoLoaded = LzoCompressor.isNativeLzoLoaded() &&
-                          LzoDecompressor.isNativeLzoLoaded();
+        LzoDecompressor.isNativeLzoLoaded();
       
       if (nativeLzoLoaded) {
         LOG.info("Successfully loaded & initialized native-lzo library");
@@ -78,7 +78,7 @@
   }
   
   public CompressionOutputStream createOutputStream(OutputStream out) 
-  throws IOException {
+    throws IOException {
     // Ensure native-lzo library is loaded & initialized
     if (!isNativeLzoLoaded()) {
       throw new IOException("native-lzo library not available");
@@ -107,12 +107,12 @@
     // Create the lzo output-stream
     LzoCompressor.CompressionStrategy strategy = 
       LzoCompressor.CompressionStrategy.valueOf(
-              conf.get("io.compression.codec.lzo.compressor",
-                        LzoCompressor.CompressionStrategy.LZO1X_1.name()
-                      )
-                    ); 
+                                                conf.get("io.compression.codec.lzo.compressor",
+                                                         LzoCompressor.CompressionStrategy.LZO1X_1.name()
+                                                         )
+                                                ); 
     int bufferSize = conf.getInt("io.compression.codec.lzo.buffersize", 
-                                  64*1024);
+                                 64*1024);
     int compressionOverhead = 0;
     if (strategy.name().contains("LZO1")) {
       compressionOverhead = (int)(((bufferSize - (64 + 3)) * 16.0) / 17.0);  
@@ -121,12 +121,12 @@
     }
      
     return new BlockCompressorStream(out, 
-            new LzoCompressor(strategy, bufferSize), 
-            bufferSize, compressionOverhead);
+                                     new LzoCompressor(strategy, bufferSize), 
+                                     bufferSize, compressionOverhead);
   }
   
   public CompressionInputStream createInputStream(InputStream in) 
-  throws IOException {
+    throws IOException {
     // Ensure native-lzo library is loaded & initialized
     if (!isNativeLzoLoaded()) {
       throw new IOException("native-lzo library not available");
@@ -135,16 +135,16 @@
     // Create the lzo input-stream
     LzoDecompressor.CompressionStrategy strategy = 
       LzoDecompressor.CompressionStrategy.valueOf(
-              conf.get("io.compression.codec.lzo.decompressor",
-                        LzoDecompressor.CompressionStrategy.LZO1X.name()
-                      )
-                    ); 
+                                                  conf.get("io.compression.codec.lzo.decompressor",
+                                                           LzoDecompressor.CompressionStrategy.LZO1X.name()
+                                                           )
+                                                  ); 
     int bufferSize = conf.getInt("io.compression.codec.lzo.buffersize", 
-                                  64*1024);
+                                 64*1024);
 
     return new BlockDecompressorStream(in, 
-            new LzoDecompressor(strategy, bufferSize), 
-            bufferSize);
+                                       new LzoDecompressor(strategy, bufferSize), 
+                                       bufferSize);
   }
   
   /**

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoCompressor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoCompressor.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoCompressor.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoCompressor.java Thu Apr 19 14:34:41 2007
@@ -153,7 +153,7 @@
       nativeLzoLoaded = true;
     } else {
       LOG.error("Cannot load " + LzoCompressor.class.getName() + 
-              " without native-hadoop library!");
+                " without native-hadoop library!");
     }
   }
   
@@ -220,7 +220,7 @@
     // Reinitialize lzo's input direct buffer
     uncompressedDirectBuf.rewind();
     ((ByteBuffer)uncompressedDirectBuf).put(userBuf, userBufOff,  
-                                          uncompressedDirectBufLen);
+                                            uncompressedDirectBufLen);
 
     // Note how much data is being fed to lzo
     userBufOff += uncompressedDirectBufLen;
@@ -261,7 +261,7 @@
   }
 
   public synchronized int compress(byte[] b, int off, int len) 
-  throws IOException {
+    throws IOException {
     if (b == null) {
       throw new NullPointerException();
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoDecompressor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoDecompressor.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoDecompressor.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/lzo/LzoDecompressor.java Thu Apr 19 14:34:41 2007
@@ -133,7 +133,7 @@
       nativeLzoLoaded = true;
     } else {
       LOG.error("Cannot load " + LzoDecompressor.class.getName() + 
-              " without native-hadoop library!");
+                " without native-hadoop library!");
     }
   }
   
@@ -202,7 +202,7 @@
     // Reinitialize lzo's input direct-buffer
     compressedDirectBuf.rewind();
     ((ByteBuffer)compressedDirectBuf).put(userBuf, userBufOff, 
-                                        compressedDirectBufLen);
+                                          compressedDirectBufLen);
     
     // Note how much data is being fed to lzo
     userBufOff += compressedDirectBufLen;
@@ -243,7 +243,7 @@
   }
 
   public synchronized int decompress(byte[] b, int off, int len) 
-  throws IOException {
+    throws IOException {
     if (b == null) {
       throw new NullPointerException();
     }
@@ -255,7 +255,7 @@
     
     // Check if there is uncompressed data
     n = uncompressedDirectBuf.remaining();
-    if(n > 0) {
+    if (n > 0) {
       n = Math.min(n, len);
       ((ByteBuffer)uncompressedDirectBuf).get(b, off, n);
       return n;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java Thu Apr 19 14:34:41 2007
@@ -188,7 +188,7 @@
    * @param directBufferSize Size of the direct buffer to be used.
    */
   public ZlibCompressor(CompressionLevel level, CompressionStrategy strategy, 
-      CompressionHeader header, int directBufferSize) {
+                        CompressionHeader header, int directBufferSize) {
     this.level = level;
     this.strategy = strategy;
     this.windowBits = header;
@@ -209,9 +209,9 @@
    */
   public ZlibCompressor() {
     this(CompressionLevel.DEFAULT_COMPRESSION, 
-        CompressionStrategy.DEFAULT_STRATEGY, 
-        CompressionHeader.DEFAULT_HEADER, 
-        DEFAULT_DIRECT_BUFFER_SIZE);
+         CompressionStrategy.DEFAULT_STRATEGY, 
+         CompressionHeader.DEFAULT_HEADER, 
+         DEFAULT_DIRECT_BUFFER_SIZE);
   }
   
   public synchronized void setInput(byte[] b, int off, int len) {
@@ -242,7 +242,7 @@
     // Reinitialize zlib's input direct buffer
     uncompressedDirectBuf.rewind();
     ((ByteBuffer)uncompressedDirectBuf).put(userBuf, userBufOff,  
-                                          uncompressedDirectBufLen);
+                                            uncompressedDirectBufLen);
 
     // Note how much data is being fed to zlib
     userBufOff += uncompressedDirectBufLen;
@@ -289,7 +289,7 @@
   }
 
   public synchronized int compress(byte[] b, int off, int len) 
-  throws IOException {
+    throws IOException {
     if (b == null) {
       throw new NullPointerException();
     }
@@ -369,7 +369,7 @@
   private native static void initIDs();
   private native static long init(int level, int strategy, int windowBits);
   private native static void setDictionary(long strm, byte[] b, int off,
-       int len);
+                                           int len);
   private native int deflateBytesDirect();
   private native static long getBytesRead(long strm);
   private native static long getBytesWritten(long strm);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java Thu Apr 19 14:34:41 2007
@@ -147,7 +147,7 @@
     // Reinitialize zlib's input direct buffer
     compressedDirectBuf.rewind();
     ((ByteBuffer)compressedDirectBuf).put(userBuf, userBufOff, 
-                                        compressedDirectBufLen);
+                                          compressedDirectBufLen);
     
     // Note how much data is being fed to zlib
     userBufOff += compressedDirectBufLen;
@@ -195,7 +195,7 @@
   }
 
   public synchronized int decompress(byte[] b, int off, int len) 
-  throws IOException {
+    throws IOException {
     if (b == null) {
       throw new NullPointerException();
     }
@@ -207,7 +207,7 @@
     
     // Check if there is uncompressed data
     n = uncompressedDirectBuf.remaining();
-    if(n > 0) {
+    if (n > 0) {
       n = Math.min(n, len);
       ((ByteBuffer)uncompressedDirectBuf).get(b, off, n);
       return n;
@@ -278,7 +278,7 @@
   private native static void initIDs();
   private native static long init(int windowBits);
   private native static void setDictionary(long strm, byte[] b, int off,
-       int len);
+                                           int len);
   private native int inflateBytesDirect();
   private native static long getBytesRead(long strm);
   private native static long getBytesWritten(long strm);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/retry/RetryPolicies.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/retry/RetryPolicies.java Thu Apr 19 14:34:41 2007
@@ -88,7 +88,7 @@
    * </p>
    */
   public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
-        Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
+                                                   Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
     return new ExceptionDependentRetry(defaultPolicy, exceptionToPolicyMap);
   }
   
@@ -169,7 +169,7 @@
     Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap;
     
     public ExceptionDependentRetry(RetryPolicy defaultPolicy,
-        Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
+                                   Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
       this.defaultPolicy = defaultPolicy;
       this.exceptionToPolicyMap = exceptionToPolicyMap;
     }
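retryByException dispatches on the raised exception's class: if exceptionToPolicyMap contains an entry for it, that policy decides whether to retry; otherwise the default policy does. A minimal wiring sketch through RetryProxy; the Store interface and its implementation are hypothetical:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.io.retry.RetryPolicies;
    import org.apache.hadoop.io.retry.RetryPolicy;
    import org.apache.hadoop.io.retry.RetryProxy;

    public class RetryWiring {
      /** Hypothetical service interface. */
      public interface Store { void put(String key) throws Exception; }

      public static Store wrap(Store raw) {
        Map<Class<? extends Exception>, RetryPolicy> policies =
          new HashMap<Class<? extends Exception>, RetryPolicy>();
        // Fail fast on bad arguments; retry anything else indefinitely.
        policies.put(IllegalArgumentException.class,
                     RetryPolicies.TRY_ONCE_THEN_FAIL);
        RetryPolicy policy =
          RetryPolicies.retryByException(RetryPolicies.RETRY_FOREVER, policies);
        return (Store) RetryProxy.create(Store.class, raw, policy);
      }
    }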

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Thu Apr 19 14:34:41 2007
@@ -65,7 +65,7 @@
     new Hashtable<InetSocketAddress, Connection>();
 
   private Class valueClass;                       // class of call values
-  private int timeout ;// timeout for calls
+  private int timeout;// timeout for calls
   private int counter;                            // counter for call ids
   private boolean running = true;                 // true while client runs
   private Configuration conf;
@@ -94,7 +94,7 @@
     /** Called by the connection thread when the call is complete and the
      * value or error string are available.  Notifies by default.  */
     public synchronized void callComplete() {
-        notify();                                 // notify caller
+      notify();                                 // notify caller
     }
 
     /** Update lastActivity with the current time. */
@@ -132,7 +132,7 @@
 
     public Connection(InetSocketAddress address) throws IOException {
       if (address.isUnresolved()) {
-         throw new UnknownHostException("unknown host: " + address.getHostName());
+        throw new UnknownHostException("unknown host: " + address.getHostName());
       }
       this.address = address;
       this.setName("IPC Client connection to " + address.toString());
@@ -183,7 +183,7 @@
                }
                return value;
              }
-          }));
+           }));
       this.out = new DataOutputStream
         (new BufferedOutputStream
          (new FilterOutputStream(socket.getOutputStream()) {
@@ -282,7 +282,7 @@
           decrementRef();
         }
       } catch (EOFException eof) {
-          // This is what happens when the remote side goes down
+        // This is what happens when the remote side goes down
       } catch (Exception e) {
         LOG.info(StringUtils.stringifyException(e));
       } finally {
@@ -408,11 +408,11 @@
           while (i.hasNext()) {
             Connection c = (Connection)i.next();
             if (c.isIdle()) { 
-            //We don't actually close the socket here (i.e., don't invoke
-            //the close() method). We leave that work to the response receiver
-            //thread. The reason for that is since we have taken a lock on the
-            //connections table object, we don't want to slow down the entire
-            //system if we happen to talk to a slow server.
+              //We don't actually close the socket here (i.e., don't invoke
+              //the close() method). We leave that work to the response receiver
+              //thread. The reason for that is since we have taken a lock on the
+              //connections table object, we don't want to slow down the entire
+              //system if we happen to talk to a slow server.
               i.remove();
               synchronized (c) {
                 c.setCloseConnection();
@@ -429,8 +429,8 @@
    * class. */
   public Client(Class valueClass, Configuration conf) {
     this.valueClass = valueClass;
-    this.timeout = conf.getInt("ipc.client.timeout",10000);
-    this.maxIdleTime = conf.getInt("ipc.client.connection.maxidletime",1000);
+    this.timeout = conf.getInt("ipc.client.timeout", 10000);
+    this.maxIdleTime = conf.getInt("ipc.client.connection.maxidletime", 1000);
     this.maxRetries = conf.getInt("ipc.client.connect.max.retries", 10);
     this.conf = conf;
 
@@ -438,7 +438,7 @@
     t.setDaemon(true);
     t.setName(valueClass.getName() + " Connection Culler");
     LOG.debug(valueClass.getName() + 
-             "Connection culler maxidletime= " + maxIdleTime + "ms");
+              "Connection culler maxidletime= " + maxIdleTime + "ms");
     t.start();
   }
  

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java Thu Apr 19 14:34:41 2007
@@ -144,7 +144,7 @@
    * Stop all RPC client connections
    */
   public static synchronized void stopClient(){
-    if(CLIENT != null)
+    if (CLIENT != null)
       CLIENT.stop();
   }
 
@@ -224,9 +224,9 @@
     while (true) {
       try {
         return getProxy(protocol, clientVersion, addr, conf);
-      } catch( ConnectException se ) {  // namenode has not been started
+      } catch(ConnectException se) {  // namenode has not been started
         LOG.info("Server at " + addr + " not available yet, Zzzzz...");
-      } catch( SocketTimeoutException te ) {  // namenode is busy
+      } catch(SocketTimeoutException te) {  // namenode is busy
         LOG.info("Problem connecting to server: " + addr);
       }
       try {
@@ -241,9 +241,9 @@
   public static VersionedProtocol getProxy(Class protocol, long clientVersion,
                                            InetSocketAddress addr, Configuration conf) throws IOException {
     VersionedProtocol proxy = (VersionedProtocol) Proxy.newProxyInstance(
-                                  protocol.getClassLoader(),
-                                  new Class[] { protocol },
-                                  new Invoker(addr, conf));
+                                                                         protocol.getClassLoader(),
+                                                                         new Class[] { protocol },
+                                                                         new Invoker(addr, conf));
     long serverVersion = proxy.getProtocolVersion(protocol.getName(), 
                                                   clientVersion);
     if (serverVersion == clientVersion) {
@@ -269,7 +269,7 @@
     }
 
     Object[] values =
-      (Object[])Array.newInstance(method.getReturnType(),wrappedValues.length);
+      (Object[])Array.newInstance(method.getReturnType(), wrappedValues.length);
     for (int i = 0; i < values.length; i++)
       if (wrappedValues[i] != null)
         values[i] = ((ObjectWritable)wrappedValues[i]).get();
@@ -280,7 +280,7 @@
   /** Construct a server for a protocol implementation instance listening on a
    * port and address. */
   public static Server getServer(final Object instance, final String bindAddress, final int port, Configuration conf) 
-  throws IOException {
+    throws IOException {
     return getServer(instance, bindAddress, port, 1, false, conf);
   }
 
@@ -289,8 +289,8 @@
   public static Server getServer(final Object instance, final String bindAddress, final int port,
                                  final int numHandlers,
                                  final boolean verbose, Configuration conf) 
-  throws IOException {
-    return new Server(instance, conf, bindAddress,port, numHandlers, verbose);
+    throws IOException {
+    return new Server(instance, conf, bindAddress, port, numHandlers, verbose);
   }
 
   /** An RPC Server. */
@@ -306,7 +306,7 @@
      * @param port the port to listen for connections on
      */
     public Server(Object instance, Configuration conf, String bindAddress, int port) 
-    throws IOException {
+      throws IOException {
       this(instance, conf,  bindAddress, port, 1, false);
     }
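getServer() and getProxy() above are the two ends of Hadoop IPC: the server reflects incoming calls onto the wrapped instance, and getProxy() checks that the server's protocol version matches clientVersion before handing back the stub. A minimal end-to-end sketch; the protocol, implementation, and port are hypothetical:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.ipc.VersionedProtocol;

    public class IpcDemo {
      /** Hypothetical protocol; versionID is what getProxy() compares. */
      public interface EchoProtocol extends VersionedProtocol {
        long versionID = 1L;
        String echo(String msg);
      }

      public static class EchoImpl implements EchoProtocol {
        public String echo(String msg) { return msg; }
        public long getProtocolVersion(String protocol, long clientVersion) {
          return versionID;
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        RPC.Server server = RPC.getServer(new EchoImpl(), "localhost", 9000, conf);
        server.start();
        EchoProtocol proxy = (EchoProtocol) RPC.getProxy(
            EchoProtocol.class, EchoProtocol.versionID,
            new InetSocketAddress("localhost", 9000), conf);
        System.out.println(proxy.echo("hi"));     // invoked over the wire
        server.stop();
      }
    }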
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Thu Apr 19 14:34:41 2007
@@ -100,7 +100,7 @@
    */
   public static InetAddress getRemoteIp() {
     Call call = CurCall.get();
-    if ( call != null ) {
+    if (call != null) {
       return call.connection.socket.getInetAddress();
     }
     return null;
@@ -110,7 +110,7 @@
    */
   public static String getRemoteAddress() {
     InetAddress addr = getRemoteIp();
-    return ( addr == null ) ? null : addr.getHostAddress();
+    return (addr == null) ? null : addr.getHostAddress();
   }
   
   private String bindAddress; 
@@ -137,9 +137,9 @@
   private LinkedList<Call> callQueue = new LinkedList<Call>(); // queued calls
 
   private List<Connection> connectionList = 
-       Collections.synchronizedList(new LinkedList<Connection>());
-                                                       //maintain a list
-                                                       //of client connections
+    Collections.synchronizedList(new LinkedList<Connection>());
+  //maintain a list
+  //of client connections
   private Listener listener = null;
   private int numConnections = 0;
   private Handler[] handlers = null;
@@ -177,7 +177,7 @@
     private int backlogLength = conf.getInt("ipc.server.listen.queue.size", 128);
     
     public Listener() throws IOException {
-      address = new InetSocketAddress(bindAddress,port);
+      address = new InetSocketAddress(bindAddress, port);
       // Create a new server socket and set to non blocking mode
       acceptChannel = ServerSocketChannel.open();
       acceptChannel.configureBlocking(false);
@@ -334,8 +334,8 @@
       }
       if (LOG.isDebugEnabled())
         LOG.debug("Server connection from " + c.toString() +
-                "; # active connections: " + numConnections +
-                "; # queued calls: " + callQueue.size() );
+                  "; # active connections: " + numConnections +
+                  "; # queued calls: " + callQueue.size());
     }
 
     void doRead(SelectionKey key) {
@@ -361,8 +361,8 @@
         try {
           if (LOG.isDebugEnabled())
             LOG.debug(getName() + ": disconnecting client " + 
-                  c.getHostAddress() + ". Number of active connections: "+
-                  numConnections);
+                      c.getHostAddress() + ". Number of active connections: "+
+                      numConnections);
           c.close();
         } catch (Exception e) {}
         c = null;
@@ -381,7 +381,7 @@
         try {
           acceptChannel.socket().close();
         } catch (IOException e) {
-            LOG.info(getName() + ":Exception in closing listener socket. " + e);
+          LOG.info(getName() + ":Exception in closing listener socket. " + e);
         }
       }
     }
@@ -405,7 +405,7 @@
     private int remotePort;
 
     public Connection(SelectionKey key, SocketChannel channel, 
-    long lastContact) {
+                      long lastContact) {
       this.key = key;
       this.channel = channel;
       this.lastContact = lastContact;
@@ -414,7 +414,7 @@
       this.socket = channel.socket();
       this.out = new DataOutputStream
         (new BufferedOutputStream(
-         this.channelOut = new SocketChannelOutputStream( channel )));
+          this.channelOut = new SocketChannelOutputStream(channel)));
       InetAddress addr = socket.getInetAddress();
       if (addr == null) {
         this.hostAddress = "*Unknown*";
@@ -441,22 +441,22 @@
     }
 
     private boolean timedOut() {
-      if(System.currentTimeMillis() -  lastContact > maxIdleTime)
+      if (System.currentTimeMillis() -  lastContact > maxIdleTime)
         return true;
       return false;
     }
 
     private boolean timedOut(long currentTime) {
-        if(currentTime -  lastContact > maxIdleTime)
-          return true;
-        return false;
+      if (currentTime -  lastContact > maxIdleTime)
+        return true;
+      return false;
     }
 
     public int readAndProcess() throws IOException, InterruptedException {
       int count = -1;
       if (dataLengthBuffer.remaining() > 0) {
         count = channel.read(dataLengthBuffer);       
-        if ( count < 0 || dataLengthBuffer.remaining() > 0 ) 
+        if (count < 0 || dataLengthBuffer.remaining() > 0) 
           return count;        
         dataLengthBuffer.flip(); 
         // Is this a new style header?
@@ -491,7 +491,7 @@
 
     private void processData() throws  IOException, InterruptedException {
       DataInputStream dis =
-          new DataInputStream(new ByteArrayInputStream( data.array() ));
+        new DataInputStream(new ByteArrayInputStream(data.array()));
       int id = dis.readInt();                    // try to read an id
         
       if (LOG.isDebugEnabled())
@@ -562,13 +562,13 @@
           
           if (LOG.isDebugEnabled())
             LOG.debug(getName() + ": has #" + call.id + " from " +
-                     call.connection);
+                      call.connection);
           
           String errorClass = null;
           String error = null;
           Writable value = null;
           
-          CurCall.set( call );
+          CurCall.set(call);
           try {
             value = call(call.param);             // make the call
           } catch (Throwable e) {
@@ -576,7 +576,7 @@
             errorClass = e.getClass().getName();
             error = StringUtils.stringifyException(e);
           }
-          CurCall.set( null );
+          CurCall.set(null);
             
           DataOutputStream out = call.connection.out;
           synchronized (out) {
@@ -614,13 +614,13 @@
    * 
    */
   protected Server(String bindAddress, int port, Class paramClass, int handlerCount, Configuration conf) 
-  throws IOException {
+    throws IOException {
     this.bindAddress = bindAddress;
     this.conf = conf;
     this.port = port;
     this.paramClass = paramClass;
     this.handlerCount = handlerCount;
-    this.timeout = conf.getInt("ipc.client.timeout",10000);
+    this.timeout = conf.getInt("ipc.client.timeout", 10000);
     maxCallStartAge = (long) (timeout * MAX_CALL_QUEUE_TIME);
     maxQueueSize = handlerCount * MAX_QUEUE_SIZE_PER_HANDLER;
     this.maxIdleTime = conf.getInt("ipc.client.maxidletime", 120000);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/SocketChannelOutputStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/SocketChannelOutputStream.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/SocketChannelOutputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/SocketChannelOutputStream.java Thu Apr 19 14:34:41 2007
@@ -85,7 +85,7 @@
    */
   public void write(byte[] buf, int offset, int length) throws IOException
   {
-    flush = ByteBuffer.wrap(buf,offset,length);
+    flush = ByteBuffer.wrap(buf, offset, length);
     flushBuffer();
   }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java Thu Apr 19 14:34:41 2007
@@ -157,8 +157,8 @@
     //value[i] is stored in the following byte range:
     //startOffsets[i] + keyLengths[i] through valLengths[i]
     value.reset(keyValBuffer,
-       startOffsets[currStartOffsetIndex] + keyLengths[currStartOffsetIndex],
-       valLengths[currStartOffsetIndex]);
+                startOffsets[currStartOffsetIndex] + keyLengths[currStartOffsetIndex],
+                valLengths[currStartOffsetIndex]);
     return value;
   }
 
@@ -181,7 +181,7 @@
     int start;
     int dataSize;
     private void reset(DataOutputBuffer d, int start, int length) 
-    throws IOException {
+      throws IOException {
       data = d.getData();
       this.start = start;
       dataSize = length;
@@ -192,14 +192,14 @@
     }
             
     public void writeUncompressedBytes(DataOutputStream outStream)
-    throws IOException {
+      throws IOException {
       outStream.write(data, start, dataSize);
     }
 
     public void writeCompressedBytes(DataOutputStream outStream) 
-    throws IllegalArgumentException, IOException {
+      throws IllegalArgumentException, IOException {
       throw
-      new IllegalArgumentException("UncompressedBytes cannot be compressed!");
+        new IllegalArgumentException("UncompressedBytes cannot be compressed!");
     }
   
   } // InMemUncompressedBytes

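The reset(...) call reformatted above encodes the layout the comment
describes: key i and value i sit back-to-back in one buffer, so value i
starts at startOffsets[i] + keyLengths[i] and runs valLengths[i] bytes. A
toy demonstration of that arithmetic (the data below is fabricated):

public class PackedBufferSketch {
  public static void main(String[] args) {
    byte[] keyValBuffer = "key1val1key22val22".getBytes();
    int[] startOffsets = {0, 8};   // where record i begins
    int[] keyLengths   = {4, 5};
    int[] valLengths   = {4, 5};
    for (int i = 0; i < startOffsets.length; i++) {
      int valStart = startOffsets[i] + keyLengths[i];
      String val = new String(keyValBuffer, valStart, valLengths[i]);
      System.out.println("value[" + i + "] = " + val); // val1, then val22
    }
  }
}
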
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java Thu Apr 19 14:34:41 2007
@@ -344,7 +344,7 @@
         int index = in.readInt();
         String counterName = UTF8.readString(in);
         long value = in.readLong();
-        counters.put(index, new CounterRec(counterName,value));
+        counters.put(index, new CounterRec(counterName, value));
       }
     }
   }

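The readFields loop touched above consumes fixed (index, name, value)
triples from a DataInput. A self-contained round-trip sketch of that record
shape; DataOutput/DataInput.writeUTF/readUTF stand in for Hadoop's UTF8
helpers, and the counter itself is invented:

import java.io.*;

public class CounterRecordSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeInt(7);                   // counter index
    out.writeUTF("MAP_INPUT_RECORDS"); // counter name
    out.writeLong(12345L);             // counter value

    DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    int index = in.readInt();
    String counterName = in.readUTF();
    long value = in.readLong();
    System.out.println(index + ": " + counterName + " = " + value);
  }
}
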
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/DefaultJobHistoryParser.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/DefaultJobHistoryParser.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/DefaultJobHistoryParser.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/DefaultJobHistoryParser.java Thu Apr 19 14:34:41 2007
@@ -2,8 +2,8 @@
 
 import java.util.*;
 import java.io.*;
-import org.apache.hadoop.mapred.JobHistory.Keys ; 
-import org.apache.hadoop.mapred.JobHistory.Values; ;
+import org.apache.hadoop.mapred.JobHistory.Keys; 
+import org.apache.hadoop.mapred.JobHistory.Values;
 
 /**
  * Default parser for job history files. It creates object model from 
@@ -123,7 +123,7 @@
     Map<String, Map<String, JobHistory.JobInfo>> jobTrackerToJobs = new TreeMap<String, Map<String, JobHistory.JobInfo>>();
 
     Map<String, JobHistory.JobInfo> activeJobs = null;
-    String currentTracker ; 
+    String currentTracker; 
     
     // Implement JobHistory.Listener
 
@@ -168,13 +168,13 @@
       throws IOException {
       
       if (recType.equals(JobHistory.RecordTypes.MapAttempt) || 
-          recType.equals(JobHistory.RecordTypes.ReduceAttempt) ) {
+          recType.equals(JobHistory.RecordTypes.ReduceAttempt)) {
         
-        if( Values.FAILED.name().equals(values.get(Keys.TASK_STATUS) )  ){
-          String hostName = values.get(Keys.HOSTNAME) ;
+        if (Values.FAILED.name().equals(values.get(Keys.TASK_STATUS))) {
+          String hostName = values.get(Keys.HOSTNAME);
           String taskid = values.get(Keys.TASKID); 
           Set<String> tasks = badNodesToNumFailedTasks.get(hostName); 
-          if( null == tasks  ){
+          if (null == tasks) {
             tasks = new TreeSet<String>(); 
             tasks.add(taskid);
             badNodesToNumFailedTasks.put(hostName, tasks);

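The listener logic cleaned up above accumulates failed attempts per host in
a Map<String, Set<String>>, creating each host's set lazily on its first
failure. A stand-alone sketch of that bookkeeping with made-up hostnames
and task ids:

import java.util.*;

public class FailedTasksByHostSketch {
  public static void main(String[] args) {
    Map<String, Set<String>> badNodes = new TreeMap<String, Set<String>>();
    String[][] failures = {{"host1", "task_0001"}, {"host1", "task_0002"},
                           {"host2", "task_0003"}};
    for (String[] f : failures) {
      Set<String> tasks = badNodes.get(f[0]);
      if (null == tasks) {             // first failure seen on this host
        tasks = new TreeSet<String>();
        badNodes.put(f[0], tasks);
      }
      tasks.add(f[1]);
    }
    // {host1=[task_0001, task_0002], host2=[task_0003]}
    System.out.println(badNodes);
  }
}
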
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileInputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileInputFormat.java Thu Apr 19 14:34:41 2007
@@ -42,11 +42,11 @@
 
   private long minSplitSize = 1;
   private static final PathFilter hiddenFileFilter = new PathFilter(){
-    public boolean accept( Path p ){
-      String name = p.getName(); 
-      return !name.startsWith("_") && !name.startsWith("."); 
-    }
-  }; 
+      public boolean accept(Path p) {
+        String name = p.getName(); 
+        return !name.startsWith("_") && !name.startsWith("."); 
+      }
+    }; 
   protected void setMinSplitSize(long minSplitSize) {
     this.minSplitSize = minSplitSize;
   }
@@ -85,7 +85,7 @@
     for (Path p: dirs) {
       FileSystem fs = p.getFileSystem(job); 
       Path[] matches =
-        fs.listPaths(fs.globPaths(p, hiddenFileFilter),hiddenFileFilter);
+        fs.listPaths(fs.globPaths(p, hiddenFileFilter), hiddenFileFilter);
       for (Path match: matches) {
         result.add(fs.makeQualified(match));
       }
@@ -111,7 +111,7 @@
           FileSystem subFS = subPath.getFileSystem(job); 
           if (!subFS.exists(subPath)) {
             result.add(new IOException(
-                "Input path does not exist: " + subPath)); 
+                                       "Input path does not exist: " + subPath)); 
           } else {
             totalFiles++; 
           }
@@ -120,18 +120,18 @@
         Path [] paths = fs.globPaths(p, hiddenFileFilter); 
         if (paths.length == 0) {
           result.add(
-            new IOException("Input Pattern " + p + " matches 0 files")); 
+                     new IOException("Input Pattern " + p + " matches 0 files")); 
         } else {
           // validate globbed paths 
           for (Path gPath : paths) {
             FileSystem gPathFS = gPath.getFileSystem(job); 
             if (!gPathFS.exists(gPath)) {
               result.add(
-                new FileNotFoundException(
-                    "Input path doesnt exist : " + gPath)); 
+                         new FileNotFoundException(
+                                                   "Input path doesn't exist: " + gPath));
             }
           }
-          totalFiles += paths.length ; 
+          totalFiles += paths.length; 
         }
       }
     }
@@ -188,7 +188,7 @@
         }
       }
     }
-    LOG.debug( "Total # of splits: " + splits.size() );
+    LOG.debug("Total # of splits: " + splits.size());
     return splits.toArray(new FileSplit[splits.size()]);
   }
 

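The hiddenFileFilter reindented above implements one simple rule: any path
whose final name component starts with "_" or "." is excluded from the
input listing. The same predicate sketched with java.io.File standing in
for Hadoop's Path/PathFilter:

import java.io.File;

public class HiddenFilterSketch {
  static boolean accept(File p) {
    String name = p.getName();
    return !name.startsWith("_") && !name.startsWith(".");
  }

  public static void main(String[] args) {
    System.out.println(accept(new File("/data/part-00000"))); // true
    System.out.println(accept(new File("/data/_logs")));      // false
    System.out.println(accept(new File("/data/.crc")));       // false
  }
}
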
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java Thu Apr 19 14:34:41 2007
@@ -82,7 +82,7 @@
 
   public String[] getLocations() throws IOException {
     String[][] hints = file.getFileSystem(conf).
-                            getFileCacheHints(file, start, length);
+      getFileCacheHints(file, start, length);
     if (hints != null && hints.length > 0) {
       return hints[0];
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/IsolationRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/IsolationRunner.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/IsolationRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/IsolationRunner.java Thu Apr 19 14:34:41 2007
@@ -121,7 +121,7 @@
     namer.setConf(conf);
     for(int i=0; i<numMaps; i++) {
       Path f = namer.getInputFile(i, taskId);
-      if(! fs.exists(f)) {
+      if (!fs.exists(f)) {
         LOG.info("Create missing input: " + f);
         SequenceFile.Writer out =
           SequenceFile.createWriter(fs, conf, f, keyClass, valueClass);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Thu Apr 19 14:34:41 2007
@@ -138,7 +138,7 @@
      * Blocks until the job is finished
      */
     public synchronized void waitForCompletion() throws IOException {
-      while (! isComplete()) {
+      while (!isComplete()) {
         try {
           Thread.sleep(5000);
         } catch (InterruptedException ie) {
@@ -243,7 +243,7 @@
    * Submit a job to the MR system
    */
   public RunningJob submitJob(String jobFile) throws FileNotFoundException, 
-                                                     InvalidJobConfException,IOException {
+                                                     InvalidJobConfException, IOException {
     // Load in the submitted job details
     JobConf job = new JobConf(jobFile);
     return submitJob(job);
@@ -307,7 +307,7 @@
 
     if (originalJarPath != null) {           // copy jar to JobTracker's fs
       // use jar name if job is not named. 
-      if( "".equals(job.getJobName() )){
+      if ("".equals(job.getJobName())){
         job.setJobName(new Path(originalJarPath).getName());
       }
       job.setJar(submitJarFile.toString());
@@ -335,23 +335,23 @@
     // sort the splits into order based on size, so that the biggest
     // go first
     Arrays.sort(splits, new Comparator<InputSplit>() {
-        public int compare(InputSplit a, InputSplit b) {
-          try {
-            long left = a.getLength();
-            long right = b.getLength();
-            if (left == right) {
-              return 0;
-            } else if (left < right) {
-              return 1;
-            } else {
-              return -1;
-            }
-          } catch (IOException ie) {
-            throw new RuntimeException("Problem getting input split size",
-                                       ie);
+      public int compare(InputSplit a, InputSplit b) {
+        try {
+          long left = a.getLength();
+          long right = b.getLength();
+          if (left == right) {
+            return 0;
+          } else if (left < right) {
+            return 1;
+          } else {
+            return -1;
           }
+        } catch (IOException ie) {
+          throw new RuntimeException("Problem getting input split size",
+                                     ie);
         }
-      });
+      }
+    });
     // write the splits to a file for the job tracker
     FSDataOutputStream out = fs.create(submitSplitFile);
     try {
@@ -543,7 +543,7 @@
       running = jc.submitJob(job);
       String jobId = running.getJobID();
       LOG.info("Running job: " + jobId);
-      int eventCounter = 0 ; 
+      int eventCounter = 0; 
         
       while (true) {
         try {
@@ -563,26 +563,26 @@
             lastReport = report;
           }
             
-          if( filter  != TaskStatusFilter.NONE){
+          if (filter != TaskStatusFilter.NONE) {
             TaskCompletionEvent[] events = 
               running.getTaskCompletionEvents(eventCounter); 
-            eventCounter += events.length ;
-            for(TaskCompletionEvent event : events ){
-              switch( filter ){
+            eventCounter += events.length;
+            for (TaskCompletionEvent event : events) {
+              switch (filter) {
               case SUCCEEDED:
-                if( event.getTaskStatus() == 
+                if (event.getTaskStatus() == 
                     TaskCompletionEvent.Status.SUCCEEDED){
                   LOG.info(event.toString());
                   displayTaskLogs(event.getTaskId(), event.getTaskTrackerHttp());
                 }
                 break; 
               case FAILED:
-                if( event.getTaskStatus() == 
+                if (event.getTaskStatus() == 
                     TaskCompletionEvent.Status.FAILED){
                   LOG.info(event.toString());
                   displayTaskLogs(event.getTaskId(), event.getTaskTrackerHttp());
                 }
-                break ; 
+                break; 
               case ALL:
                 LOG.info(event.toString());
                 displayTaskLogs(event.getTaskId(), event.getTaskTrackerHttp());
@@ -654,13 +654,13 @@
   static Configuration getConfiguration(String jobTrackerSpec)
   {
     Configuration conf = new Configuration();
-    if(jobTrackerSpec != null) {        
-      if(jobTrackerSpec.indexOf(":") >= 0) {
+    if (jobTrackerSpec != null) {        
+      if (jobTrackerSpec.indexOf(":") >= 0) {
         conf.set("mapred.job.tracker", jobTrackerSpec);
       } else {
         String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
         URL validate = conf.getResource(classpathFile);
-        if(validate == null) {
+        if (validate == null) {
           throw new RuntimeException(classpathFile + " not found on CLASSPATH");
         }
         conf.addFinalResource(classpathFile);
@@ -675,8 +675,8 @@
    * @param newValue task filter.
    */
   @Deprecated
-    public void setTaskOutputFilter(TaskStatusFilter newValue){
-    this.taskOutputFilter = newValue ;
+  public void setTaskOutputFilter(TaskStatusFilter newValue){
+    this.taskOutputFilter = newValue;
   }
     
   /**
@@ -704,7 +704,7 @@
    * @return task filter. 
    */
   @Deprecated
-    public TaskStatusFilter getTaskOutputFilter(){
+  public TaskStatusFilter getTaskOutputFilter(){
     return this.taskOutputFilter; 
   }
     

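The comparator re-indented in submitJob above deliberately inverts the
natural ordering so the largest splits sort first and long-running work is
scheduled earliest. The same trick on plain lengths, with fabricated sizes
standing in for InputSplit.getLength():

import java.util.*;

public class BiggestFirstSketch {
  public static void main(String[] args) {
    Long[] splitLengths = {64L, 512L, 128L};
    Arrays.sort(splitLengths, new Comparator<Long>() {
      public int compare(Long a, Long b) {
        if (a.longValue() == b.longValue()) return 0;
        return a.longValue() < b.longValue() ? 1 : -1; // reversed: big first
      }
    });
    System.out.println(Arrays.toString(splitLengths)); // [512, 128, 64]
  }
}
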
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Thu Apr 19 14:34:41 2007
@@ -266,18 +266,18 @@
 
   public InputFormat getInputFormat() {
     return (InputFormat)ReflectionUtils.newInstance(getClass("mapred.input.format.class",
-                                             TextInputFormat.class,
-                                             InputFormat.class),
-                                             this);
+                                                             TextInputFormat.class,
+                                                             InputFormat.class),
+                                                    this);
   }
   public void setInputFormat(Class theClass) {
     setClass("mapred.input.format.class", theClass, InputFormat.class);
   }
   public OutputFormat getOutputFormat() {
     return (OutputFormat)ReflectionUtils.newInstance(getClass("mapred.output.format.class",
-                                              TextOutputFormat.class,
-                                              OutputFormat.class),
-                                              this);
+                                                              TextOutputFormat.class,
+                                                              OutputFormat.class),
+                                                     this);
   }
   public void setOutputFormat(Class theClass) {
     setClass("mapred.output.format.class", theClass, OutputFormat.class);

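getInputFormat()/getOutputFormat() above follow one pattern: look a class
name up in the configuration, fall back to a default, and instantiate the
result reflectively. A rough sketch with java.util.Properties standing in
for Configuration and stock JDK classes standing in for the format classes:

import java.util.*;

public class ReflectiveFactorySketch {
  static Object newInstance(Properties conf, String key, Class<?> defaultClass)
      throws Exception {
    String name = conf.getProperty(key);
    Class<?> c = (name == null) ? defaultClass : Class.forName(name);
    return c.newInstance();        // requires a public no-arg constructor
  }

  public static void main(String[] args) throws Exception {
    Properties conf = new Properties();
    System.out.println(newInstance(conf, "mapred.input.format.class",
                                   ArrayList.class).getClass().getName());
    conf.setProperty("mapred.input.format.class", "java.util.LinkedList");
    System.out.println(newInstance(conf, "mapred.input.format.class",
                                   ArrayList.class).getClass().getName());
  }
}
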
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobEndNotifier.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobEndNotifier.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobEndNotifier.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobEndNotifier.java Thu Apr 19 14:34:41 2007
@@ -25,47 +25,47 @@
   public static void startNotifier() {
     running = true;
     thread = new Thread(
-      new Runnable() {
-        public void run() {
-          try {
-            while (running) {
-              sendNotification(queue.take());
-            }
-          }
-          catch (InterruptedException irex) {
-            if (running) {
-              LOG.error("Thread has ended unexpectedly", irex);
-            }
-          }
-        }
+                        new Runnable() {
+                          public void run() {
+                            try {
+                              while (running) {
+                                sendNotification(queue.take());
+                              }
+                            }
+                            catch (InterruptedException irex) {
+                              if (running) {
+                                LOG.error("Thread has ended unexpectedly", irex);
+                              }
+                            }
+                          }
 
-        private void sendNotification(JobEndStatusInfo notification) {
-          try {
-            int code = httpNotification(notification.getUri());
-            if (code != 200) {
-              throw new IOException("Invalid response status code: " + code);
-            }
-          }
-          catch (IOException ioex) {
-            LOG.error("Notification failure [" + notification + "]", ioex);
-            if (notification.configureForRetry()) {
-              try {
-                queue.put(notification);
-              }
-              catch (InterruptedException iex) {
-                LOG.error("Notification queuing error [" + notification + "]",
-                  iex);
-              }
-            }
-          }
-          catch (Exception ex) {
-            LOG.error("Notification failure [" + notification + "]", ex);
-          }
-        }
+                          private void sendNotification(JobEndStatusInfo notification) {
+                            try {
+                              int code = httpNotification(notification.getUri());
+                              if (code != 200) {
+                                throw new IOException("Invalid response status code: " + code);
+                              }
+                            }
+                            catch (IOException ioex) {
+                              LOG.error("Notification failure [" + notification + "]", ioex);
+                              if (notification.configureForRetry()) {
+                                try {
+                                  queue.put(notification);
+                                }
+                                catch (InterruptedException iex) {
+                                  LOG.error("Notification queuing error [" + notification + "]",
+                                            iex);
+                                }
+                              }
+                            }
+                            catch (Exception ex) {
+                              LOG.error("Notification failure [" + notification + "]", ex);
+                            }
+                          }
 
-      }
+                        }
 
-    );
+                        );
     thread.start();
   }
 
@@ -75,7 +75,7 @@
   }
 
   private static JobEndStatusInfo createNotification(JobConf conf,
-    JobStatus status) {
+                                                     JobStatus status) {
     JobEndStatusInfo notification = null;
     String uri = conf.get("job.end.notification.url");
     if (uri != null) {

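Taken together, the notifier above is a daemon thread that drains a queue
of job-end notifications, fires an HTTP GET for each, and re-queues
failures (subject to configureForRetry()). A compressed sketch of that
loop; the HTTP call is stubbed and the retry is unconditional, so treat it
as the shape of the code, not the real class:

import java.util.concurrent.*;

public class NotifierSketch {
  static final BlockingQueue<String> queue = new LinkedBlockingQueue<String>();
  static volatile boolean running = true;

  public static void main(String[] args) throws Exception {
    Thread t = new Thread(new Runnable() {
      public void run() {
        try {
          while (running) {
            String uri = queue.take();
            if (httpNotification(uri) != 200) {
              queue.put(uri);        // simple unconditional retry
            }
          }
        } catch (InterruptedException irex) {
          if (running) System.err.println("Thread has ended unexpectedly");
        }
      }
    });
    t.setDaemon(true);               // let the JVM exit while take() blocks
    t.start();
    queue.put("http://example.com/jobEnd?status=SUCCEEDED");
    Thread.sleep(100);
    running = false;
  }

  static int httpNotification(String uri) { // stand-in for the real HTTP GET
    System.out.println("notify: " + uri);
    return 200;
  }
}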

