hadoop-common-commits mailing list archives

From: cutt...@apache.org
Subject: svn commit: r530556 [2/12] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/hbase/src/java/org/apache/hadoop/hbase/ src/contrib/hbase/src/test/org/apache/hadoop/hbase/ src/contrib/streaming/src/java/org/ap...
Date: Thu, 19 Apr 2007 21:34:53 GMT
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Thu Apr 19 14:34:41 2007
@@ -61,7 +61,7 @@
     }
     
     public void run() {
-      while(! stopRequested) {
+      while(!stopRequested) {
         long startTime = System.currentTimeMillis();
 
         // Grab a list of regions to check
@@ -78,12 +78,12 @@
         // Check to see if they need splitting
 
         Vector<SplitRegion> toSplit = new Vector<SplitRegion>();
-        for(Iterator<HRegion> it = checkSplit.iterator(); it.hasNext(); ) {
+        for(Iterator<HRegion> it = checkSplit.iterator(); it.hasNext();) {
           HRegion cur = it.next();
           Text midKey = new Text();
           
           try {
-            if(cur.needsSplit(midKey)) {
+            if (cur.needsSplit(midKey)) {
               toSplit.add(new SplitRegion(cur, midKey));
             }
             
@@ -92,7 +92,7 @@
           }
         }
 
-        for(Iterator<SplitRegion> it = toSplit.iterator(); it.hasNext(); ) {
+        for(Iterator<SplitRegion> it = toSplit.iterator(); it.hasNext();) {
           SplitRegion r = it.next();
           
           locking.obtainWriteLock();
@@ -161,7 +161,7 @@
   private Thread cacheFlusherThread;
   private class Flusher implements Runnable {
     public void run() {
-      while(! stopRequested) {
+      while(!stopRequested) {
         long startTime = System.currentTimeMillis();
 
         // Grab a list of items to flush
@@ -177,7 +177,7 @@
 
         // Flush them, if necessary
 
-        for(Iterator<HRegion> it = toFlush.iterator(); it.hasNext(); ) {
+        for(Iterator<HRegion> it = toFlush.iterator(); it.hasNext();) {
           HRegion cur = it.next();
           
           try {
@@ -212,12 +212,12 @@
   private Thread logRollerThread;
   private class LogRoller implements Runnable {
     public void run() {
-      while(! stopRequested) {
+      while(!stopRequested) {
 
         // If the number of log entries is high enough, roll the log.  This is a
         // very fast operation, but should not be done too frequently.
 
-        if(log.getNumEntries() > maxLogEntries) {
+        if (log.getNumEntries() > maxLogEntries) {
           try {
             log.rollWriter();
             
@@ -334,7 +334,7 @@
    * processing to cease.
    */
   public void stop() throws IOException {
-    if(! stopRequested) {
+    if (!stopRequested) {
       stopRequested = true;
  
       closeAllRegions();
@@ -375,7 +375,7 @@
    * load/unload instructions.
    */
   public void run() {
-    while(! stopRequested) {
+    while(!stopRequested) {
       HServerInfo info = new HServerInfo(address, rand.nextLong());
       long lastMsg = 0;
       long waitTime;
@@ -398,8 +398,8 @@
       
       // Now ask the master what it wants us to do and tell it what we have done.
       
-      while(! stopRequested) {
-        if((System.currentTimeMillis() - lastMsg) >= msgInterval) {
+      while(!stopRequested) {
+        if ((System.currentTimeMillis() - lastMsg) >= msgInterval) {
 
           HMsg outboundArray[] = null;
           synchronized(outboundMsgs) {
@@ -413,7 +413,7 @@
 
             // Process the HMaster's instruction stream
 
-            if(! processMessages(msgs)) {
+            if (!processMessages(msgs)) {
               break;
             }
 
@@ -529,10 +529,10 @@
     try {
       HRegion region = regions.remove(info.regionName);
       
-      if(region != null) {
+      if (region != null) {
         region.close();
         
-        if(reportWhenCompleted) {
+        if (reportWhenCompleted) {
           reportClose(region);
         }
       }
@@ -548,7 +548,7 @@
     try {
       HRegion region = regions.remove(info.regionName);
   
-      if(region != null) {
+      if (region != null) {
         region.closeAndDelete();
       }
   
@@ -561,7 +561,7 @@
   private void closeAllRegions() throws IOException {
     locking.obtainWriteLock();
     try {
-      for(Iterator<HRegion> it = regions.values().iterator(); it.hasNext(); ) {
+      for(Iterator<HRegion> it = regions.values().iterator(); it.hasNext();) {
         HRegion region = it.next();
         region.close();
       }
@@ -606,7 +606,7 @@
   /** Obtain a table descriptor for the given region */
   public HRegionInfo getRegionInfo(Text regionName) {
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       return null;
     }
     return region.getRegionInfo();
@@ -617,7 +617,7 @@
                                        Text firstRow) throws IOException {
 
     HRegion r = getRegion(regionName);
-    if(r == null) {
+    if (r == null) {
       throw new IOException("Not serving region " + regionName);
     }
     return r.getScanner(cols, firstRow);
@@ -626,12 +626,12 @@
   /** Get the indicated row/column */
   public BytesWritable get(Text regionName, Text row, Text column) throws IOException {
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
     byte results[] = region.get(row, column);
-    if(results != null) {
+    if (results != null) {
       return new BytesWritable(results);
     }
     return null;
@@ -642,15 +642,15 @@
                              int numVersions) throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
     byte results[][] = region.get(row, column, numVersions);
-    if(results != null) {
+    if (results != null) {
       BytesWritable realResults[] = new BytesWritable[results.length];
       for(int i = 0; i < realResults.length; i++) {
-        if(results[i] != null) {
+        if (results[i] != null) {
           realResults[i] = new BytesWritable(results[i]);
         }
       }
@@ -664,15 +664,15 @@
                              long timestamp, int numVersions) throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
     byte results[][] = region.get(row, column, timestamp, numVersions);
-    if(results != null) {
+    if (results != null) {
       BytesWritable realResults[] = new BytesWritable[results.length];
       for(int i = 0; i < realResults.length; i++) {
-        if(results[i] != null) {
+        if (results[i] != null) {
           realResults[i] = new BytesWritable(results[i]);
         }
       }
@@ -684,14 +684,14 @@
   /** Get all the columns (along with their names) for a given row. */
   public LabelledData[] getRow(Text regionName, Text row) throws IOException {
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
     TreeMap<Text, byte[]> map = region.getFull(row);
     LabelledData result[] = new LabelledData[map.size()];
     int counter = 0;
-    for(Iterator<Text> it = map.keySet().iterator(); it.hasNext(); ) {
+    for(Iterator<Text> it = map.keySet().iterator(); it.hasNext();) {
       Text colname = it.next();
       byte val[] = map.get(colname);
       result[counter++] = new LabelledData(colname, val);
@@ -726,7 +726,7 @@
     throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
@@ -743,7 +743,7 @@
                   BytesWritable val) throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
@@ -758,7 +758,7 @@
     throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
@@ -773,7 +773,7 @@
     throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     
@@ -788,7 +788,7 @@
     throws IOException {
     
     HRegion region = getRegion(regionName);
-    if(region == null) {
+    if (region == null) {
       throw new IOException("Not serving region " + regionName);
     }
     

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java Thu Apr 19 14:34:41 2007
@@ -35,7 +35,7 @@
   
   public HServerAddress(String hostAndPort) {
     int colonIndex = hostAndPort.indexOf(':');
-    if(colonIndex < 0) {
+    if (colonIndex < 0) {
       throw new IllegalArgumentException("Not a host:port pair: " + hostAndPort);
     }
     String host = hostAndPort.substring(0, colonIndex);
@@ -80,7 +80,7 @@
     String bindAddress = in.readUTF();
     int port = in.readInt();
     
-    if(bindAddress == null || bindAddress.length() == 0) {
+    if (bindAddress == null || bindAddress.length() == 0) {
       address = null;
       stringValue = null;
       
@@ -91,7 +91,7 @@
   }
 
   public void write(DataOutput out) throws IOException {
-    if(address == null) {
+    if (address == null) {
       out.writeUTF("");
       out.writeInt(0);
       

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java Thu Apr 19 14:34:41 2007
@@ -110,7 +110,7 @@
 
     this.compactdir = new Path(dir, COMPACTION_DIR);
     Path curCompactStore = HStoreFile.getHStoreDir(compactdir, regionName, colFamily);
-    if(fs.exists(curCompactStore)) {
+    if (fs.exists(curCompactStore)) {
       processReadyCompaction();
       fs.delete(curCompactStore);
     }
@@ -123,7 +123,7 @@
     Vector<HStoreFile> hstoreFiles 
       = HStoreFile.loadHStoreFiles(conf, dir, regionName, colFamily, fs);
     
-    for(Iterator<HStoreFile> it = hstoreFiles.iterator(); it.hasNext(); ) {
+    for(Iterator<HStoreFile> it = hstoreFiles.iterator(); it.hasNext();) {
       HStoreFile hsf = it.next();
       mapFiles.put(hsf.loadInfo(fs), hsf);
     }
@@ -138,11 +138,11 @@
     // contain any updates also contained in the log.
 
     long maxSeqID = -1;
-    for(Iterator<HStoreFile> it = hstoreFiles.iterator(); it.hasNext(); ) {
+    for(Iterator<HStoreFile> it = hstoreFiles.iterator(); it.hasNext();) {
       HStoreFile hsf = it.next();
       long seqid = hsf.loadInfo(fs);
-      if(seqid > 0) {
-        if(seqid > maxSeqID) {
+      if (seqid > 0) {
+        if (seqid > maxSeqID) {
           maxSeqID = seqid;
         }
       }
@@ -157,7 +157,7 @@
 
     LOG.debug("reading reconstructionLog");
     
-    if(reconstructionLog != null && fs.exists(reconstructionLog)) {
+    if (reconstructionLog != null && fs.exists(reconstructionLog)) {
       long maxSeqIdInLog = -1;
       TreeMap<HStoreKey, BytesWritable> reconstructedCache 
         = new TreeMap<HStoreKey, BytesWritable>();
@@ -170,7 +170,7 @@
         HLogEdit val = new HLogEdit();
         while(login.next(key, val)) {
           maxSeqIdInLog = Math.max(maxSeqIdInLog, key.getLogSeqNum());
-          if(key.getLogSeqNum() <= maxSeqID) {
+          if (key.getLogSeqNum() <= maxSeqID) {
             continue;
           }
           reconstructedCache.put(new HStoreKey(key.getRow(), val.getColumn(), 
@@ -181,7 +181,7 @@
         login.close();
       }
 
-      if(reconstructedCache.size() > 0) {
+      if (reconstructedCache.size() > 0) {
         
         // We create a "virtual flush" at maxSeqIdInLog+1.
         
@@ -195,7 +195,7 @@
     // should be "timeless"; that is, it should not have an associated seq-ID, 
     // because all log messages have been reflected in the TreeMaps at this point.
     
-    if(mapFiles.size() >= 1) {
+    if (mapFiles.size() >= 1) {
       compactHelper(true);
     }
 
@@ -204,7 +204,7 @@
 
     LOG.debug("starting map readers");
     
-    for(Iterator<Long> it = mapFiles.keySet().iterator(); it.hasNext(); ) {
+    for(Iterator<Long> it = mapFiles.keySet().iterator(); it.hasNext();) {
       Long key = it.next().longValue();
       HStoreFile hsf = mapFiles.get(key);
 
@@ -222,7 +222,7 @@
     LOG.info("closing HStore for " + this.regionName + "/" + this.colFamily);
     
     try {
-      for(Iterator<MapFile.Reader> it = maps.values().iterator(); it.hasNext(); ) {
+      for(Iterator<MapFile.Reader> it = maps.values().iterator(); it.hasNext();) {
         MapFile.Reader map = it.next();
         map.close();
       }
@@ -273,9 +273,9 @@
                                               HStoreKey.class, BytesWritable.class);
       
       try {
-        for(Iterator<HStoreKey> it = inputCache.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<HStoreKey> it = inputCache.keySet().iterator(); it.hasNext();) {
           HStoreKey curkey = it.next();
-          if(this.colFamily.equals(HStoreKey.extractFamily(curkey.getColumn()))) {
+          if (this.colFamily.equals(HStoreKey.extractFamily(curkey.getColumn()))) {
             BytesWritable val = inputCache.get(curkey);
             out.append(curkey, val);
           }
@@ -294,7 +294,7 @@
 
       // C. Finally, make the new MapFile available.
 
-      if(addToAvailableMaps) {
+      if (addToAvailableMaps) {
         locking.obtainWriteLock();
         
         try {
@@ -312,7 +312,7 @@
 
   public Vector<HStoreFile> getAllMapFiles() {
     Vector<HStoreFile> flushedFiles = new Vector<HStoreFile>();
-    for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext(); ) {
+    for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext();) {
       HStoreFile hsf = it.next();
       flushedFiles.add(hsf);
     }
@@ -366,11 +366,11 @@
         // Compute the max-sequenceID seen in any of the to-be-compacted TreeMaps
 
         long maxSeenSeqID = -1;
-        for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext(); ) {
+        for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext();) {
           HStoreFile hsf = it.next();
           long seqid = hsf.loadInfo(fs);
-          if(seqid > 0) {
-            if(seqid > maxSeenSeqID) {
+          if (seqid > 0) {
+            if (seqid > maxSeenSeqID) {
               maxSeenSeqID = seqid;
             }
           }
@@ -380,11 +380,11 @@
         HStoreFile compactedOutputFile 
           = new HStoreFile(conf, compactdir, regionName, colFamily, -1);
         
-        if(toCompactFiles.size() == 1) {
+        if (toCompactFiles.size() == 1) {
           LOG.debug("nothing to compact for " + this.regionName + "/" + this.colFamily);
           
           HStoreFile hsf = toCompactFiles.elementAt(0);
-          if(hsf.loadInfo(fs) == -1) {
+          if (hsf.loadInfo(fs) == -1) {
             return;
           }
         }
@@ -414,7 +414,7 @@
           BytesWritable[] vals = new BytesWritable[toCompactFiles.size()];
           boolean[] done = new boolean[toCompactFiles.size()];
           int pos = 0;
-          for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext(); ) {
+          for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext();) {
             HStoreFile hsf = it.next();
             readers[pos] = new MapFile.Reader(fs, hsf.getMapFilePath().toString(), conf);
             keys[pos] = new HStoreKey();
@@ -431,8 +431,8 @@
           int numDone = 0;
           for(int i = 0; i < readers.length; i++) {
             readers[i].reset();
-            done[i] = ! readers[i].next(keys[i], vals[i]);
-            if(done[i]) {
+            done[i] = !readers[i].next(keys[i], vals[i]);
+            if (done[i]) {
               numDone++;
             }
           }
@@ -446,15 +446,15 @@
 
             int smallestKey = -1;
             for(int i = 0; i < readers.length; i++) {
-              if(done[i]) {
+              if (done[i]) {
                 continue;
               }
               
-              if(smallestKey < 0) {
+              if (smallestKey < 0) {
                 smallestKey = i;
               
               } else {
-                if(keys[i].compareTo(keys[smallestKey]) < 0) {
+                if (keys[i].compareTo(keys[smallestKey]) < 0) {
                   smallestKey = i;
                 }
               }
@@ -463,8 +463,8 @@
             // Reflect the current key/val in the output
 
             HStoreKey sk = keys[smallestKey];
-            if(lastRow.equals(sk.getRow())
-               && lastColumn.equals(sk.getColumn())) {
+            if (lastRow.equals(sk.getRow())
+                && lastColumn.equals(sk.getColumn())) {
               
               timesSeen++;
               
@@ -472,13 +472,13 @@
               timesSeen = 1;
             }
             
-            if(timesSeen <= maxVersions) {
+            if (timesSeen <= maxVersions) {
 
               // Keep old versions until we have maxVersions worth.
               // Then just skip them.
 
-              if(sk.getRow().getLength() != 0
-                 && sk.getColumn().getLength() != 0) {
+              if (sk.getRow().getLength() != 0
+                  && sk.getColumn().getLength() != 0) {
                 
                 // Only write out objects which have a non-zero length key and value
 
@@ -499,7 +499,7 @@
             // Advance the smallest key.  If that reader's all finished, then 
             // mark it as done.
 
-            if(! readers[smallestKey].next(keys[smallestKey], vals[smallestKey])) {
+            if (!readers[smallestKey].next(keys[smallestKey], vals[smallestKey])) {
               done[smallestKey] = true;
               readers[smallestKey].close();
               numDone++;
@@ -516,7 +516,7 @@
 
         // Now, write out an HSTORE_LOGINFOFILE for the brand-new TreeMap.
 
-        if((! deleteSequenceInfo) && maxSeenSeqID >= 0) {
+        if ((!deleteSequenceInfo) && maxSeenSeqID >= 0) {
           compactedOutputFile.writeInfo(fs, maxSeenSeqID);
           
         } else {
@@ -529,7 +529,7 @@
         DataOutputStream out = new DataOutputStream(fs.create(filesToReplace));
         try {
           out.writeInt(toCompactFiles.size());
-          for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext(); ) {
+          for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext();) {
             HStoreFile hsf = it.next();
             hsf.write(out);
           }
@@ -587,7 +587,7 @@
     Path curCompactStore = HStoreFile.getHStoreDir(compactdir, regionName, colFamily);
     try {
       Path doneFile = new Path(curCompactStore, COMPACTION_DONE);
-      if(! fs.exists(doneFile)) {
+      if (!fs.exists(doneFile)) {
         
         // The last execution didn't finish the compaction, so there's nothing 
         // we can do.  We'll just have to redo it. Abandon it and return.
@@ -622,18 +622,18 @@
       // 3. Unload all the replaced MapFiles.
       
       Iterator<HStoreFile> it2 = mapFiles.values().iterator();
-      for(Iterator<MapFile.Reader> it = maps.values().iterator(); it.hasNext(); ) {
+      for(Iterator<MapFile.Reader> it = maps.values().iterator(); it.hasNext();) {
         MapFile.Reader curReader = it.next();
         HStoreFile curMapFile = it2.next();
-        if(toCompactFiles.contains(curMapFile)) {
+        if (toCompactFiles.contains(curMapFile)) {
           curReader.close();
           it.remove();
         }
       }
       
-      for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext(); ) {
+      for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext();) {
         HStoreFile curMapFile = it.next();
-        if(toCompactFiles.contains(curMapFile)) {
+        if (toCompactFiles.contains(curMapFile)) {
           it.remove();
         }
       }
@@ -645,7 +645,7 @@
 
       // 4. Delete all the old files, no longer needed
       
-      for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext(); ) {
+      for(Iterator<HStoreFile> it = toCompactFiles.iterator(); it.hasNext();) {
         HStoreFile hsf = it.next();
         fs.delete(hsf.getMapFilePath());
         fs.delete(hsf.getInfoFilePath());
@@ -720,12 +720,12 @@
           
           do {
             Text readcol = readkey.getColumn();
-            if(results.get(readcol) == null
-               && key.matchesWithoutColumn(readkey)) {
+            if (results.get(readcol) == null
+                && key.matchesWithoutColumn(readkey)) {
               results.put(new Text(readcol), readval.get());
               readval = new BytesWritable();
               
-            } else if(key.getRow().compareTo(readkey.getRow()) > 0) {
+            } else if (key.getRow().compareTo(readkey.getRow()) > 0) {
               break;
             }
             
@@ -745,7 +745,7 @@
    * If 'numVersions' is negative, the method returns all available versions.
    */
   public byte[][] get(HStoreKey key, int numVersions) throws IOException {
-    if(numVersions == 0) {
+    if (numVersions == 0) {
       throw new IllegalArgumentException("Must request at least one value.");
     }
     
@@ -763,12 +763,12 @@
           map.reset();
           HStoreKey readkey = (HStoreKey)map.getClosest(key, readval);
           
-          if(readkey.matchesRowCol(key)) {
+          if (readkey.matchesRowCol(key)) {
             results.add(readval.get());
             readval = new BytesWritable();
 
             while(map.next(readkey, readval) && readkey.matchesRowCol(key)) {
-              if(numVersions > 0 && (results.size() >= numVersions)) {
+              if (numVersions > 0 && (results.size() >= numVersions)) {
                 break;
                 
               } else {
@@ -778,12 +778,12 @@
             }
           }
         }
-        if(results.size() >= numVersions) {
+        if (results.size() >= numVersions) {
           break;
         }
       }
 
-      if(results.size() == 0) {
+      if (results.size() == 0) {
         return null;
         
       } else {
@@ -809,13 +809,13 @@
     // Iterate through all the MapFiles
     
     for(Iterator<Map.Entry<Long, HStoreFile>> it = mapFiles.entrySet().iterator();
-        it.hasNext(); ) {
+        it.hasNext();) {
       
       Map.Entry<Long, HStoreFile> e = it.next();
       HStoreFile curHSF = e.getValue();
       long size = fs.getLength(new Path(curHSF.getMapFilePath(), MapFile.DATA_FILE_NAME));
       
-      if(size > maxSize) {              // This is the largest one so far
+      if (size > maxSize) {              // This is the largest one so far
         maxSize = size;
         mapIndex = e.getKey();
       }
@@ -871,7 +871,7 @@
       try {
         this.readers = new MapFile.Reader[mapFiles.size()];
         int i = 0;
-        for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext(); ) {
+        for(Iterator<HStoreFile> it = mapFiles.values().iterator(); it.hasNext();) {
           HStoreFile curHSF = it.next();
           readers[i++] = new MapFile.Reader(fs, curHSF.getMapFilePath().toString(), conf);
         }
@@ -885,14 +885,14 @@
           keys[i] = new HStoreKey();
           vals[i] = new BytesWritable();
 
-          if(firstRow.getLength() != 0) {
-            if(findFirstRow(i, firstRow)) {
+          if (firstRow.getLength() != 0) {
+            if (findFirstRow(i, firstRow)) {
               continue;
             }
           }
           
           while(getNext(i)) {
-            if(columnMatch(i)) {
+            if (columnMatch(i)) {
               break;
             }
           }
@@ -915,7 +915,7 @@
       HStoreKey firstKey
         = (HStoreKey)readers[i].getClosest(new HStoreKey(firstRow), vals[i]);
       
-      if(firstKey == null) {
+      if (firstKey == null) {
         
         // Didn't find it. Close the scanner and return TRUE
         
@@ -935,7 +935,7 @@
      * @return - true if there is more data available
      */
     boolean getNext(int i) throws IOException {
-      if(! readers[i].next(keys[i], vals[i])) {
+      if (!readers[i].next(keys[i], vals[i])) {
         closeSubScanner(i);
         return false;
       }
@@ -945,7 +945,7 @@
     /** Close down the indicated reader. */
     void closeSubScanner(int i) throws IOException {
       try {
-        if(readers[i] != null) {
+        if (readers[i] != null) {
           readers[i].close();
         }
         
@@ -958,10 +958,10 @@
 
     /** Shut it down! */
     public void close() throws IOException {
-      if(! scannerClosed) {
+      if (!scannerClosed) {
         try {
           for(int i = 0; i < readers.length; i++) {
-            if(readers[i] != null) {
+            if (readers[i] != null) {
               readers[i].close();
             }
           }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java Thu Apr 19 14:34:41 2007
@@ -158,13 +158,13 @@
     for(int i = 0; i < datfiles.length; i++) {
       String name = datfiles[i].getName();
       
-      if(name.startsWith(HSTORE_DATFILE_PREFIX)) {
+      if (name.startsWith(HSTORE_DATFILE_PREFIX)) {
         Long fileId = Long.parseLong(name.substring(HSTORE_DATFILE_PREFIX.length()));
         HStoreFile curfile = new HStoreFile(conf, dir, regionName, colFamily, fileId);
         Path mapfile = curfile.getMapFilePath();
         Path infofile = curfile.getInfoFilePath();
         
-        if(fs.exists(infofile)) {
+        if (fs.exists(infofile)) {
           results.add(curfile);
           
         } else {
@@ -178,12 +178,12 @@
     for(int i = 0; i < infofiles.length; i++) {
       String name = infofiles[i].getName();
       
-      if(name.startsWith(HSTORE_INFOFILE_PREFIX)) {
+      if (name.startsWith(HSTORE_INFOFILE_PREFIX)) {
         long fileId = Long.parseLong(name.substring(HSTORE_INFOFILE_PREFIX.length()));
         HStoreFile curfile = new HStoreFile(conf, dir, regionName, colFamily, fileId);
         Path mapfile = curfile.getMapFilePath();
         
-        if(! fs.exists(mapfile)) {
+        if (!fs.exists(mapfile)) {
           fs.delete(curfile.getInfoFilePath());
         }
       }
@@ -220,7 +220,7 @@
           while(in.next(readkey, readval)) {
             Text key = readkey.getRow();
             
-            if(key.compareTo(midKey) < 0) {
+            if (key.compareTo(midKey) < 0) {
               outA.append(readkey, readval);
               
             } else {
@@ -260,7 +260,7 @@
                                             HStoreKey.class, BytesWritable.class);
     
     try {
-      for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext(); ) {
+      for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext();) {
         HStoreFile src = it.next();
         MapFile.Reader in = new MapFile.Reader(fs, src.getMapFilePath().toString(), conf);
         
@@ -283,11 +283,11 @@
     // Build a unified InfoFile from the source InfoFiles.
 
     long unifiedSeqId = -1;
-    for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext(); ) {
+    for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext();) {
       HStoreFile hsf = it.next();
       long curSeqId = hsf.loadInfo(fs);
       
-      if(curSeqId > unifiedSeqId) {
+      if (curSeqId > unifiedSeqId) {
         unifiedSeqId = curSeqId;
       }
     }
@@ -301,7 +301,7 @@
     
     try {
       byte flag = in.readByte();
-      if(flag == INFO_SEQ_NUM) {
+      if (flag == INFO_SEQ_NUM) {
         return in.readLong();
         
       } else {
@@ -352,17 +352,17 @@
   public int compareTo(Object o) {
     HStoreFile other = (HStoreFile) o;
     int result = this.dir.compareTo(other.dir);    
-    if(result == 0) {
+    if (result == 0) {
       this.regionName.compareTo(other.regionName);
     }
-    if(result == 0) {
+    if (result == 0) {
       result = this.colFamily.compareTo(other.colFamily);
     }    
-    if(result == 0) {
-      if(this.fileId < other.fileId) {
+    if (result == 0) {
+      if (this.fileId < other.fileId) {
         result = -1;
         
-      } else if(this.fileId > other.fileId) {
+      } else if (this.fileId > other.fileId) {
         result = 1;
       }
     }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java Thu Apr 19 14:34:41 2007
@@ -29,7 +29,7 @@
   public static Text extractFamily(Text col) throws IOException {
     String column = col.toString();
     int colpos = column.indexOf(":");
-    if(colpos < 0) {
+    if (colpos < 0) {
       throw new IllegalArgumentException("Illegal column name has no family indicator: " + column);
     }
     return new Text(column.substring(0, colpos));
@@ -94,8 +94,8 @@
   }
   
   public boolean matchesRowCol(HStoreKey other) {
-    if(this.row.compareTo(other.row) == 0 &&
-       this.column.compareTo(other.column) == 0) {
+    if (this.row.compareTo(other.row) == 0 &&
+        this.column.compareTo(other.column) == 0) {
       return true;
       
     } else {
@@ -104,8 +104,8 @@
   }
   
   public boolean matchesWithoutColumn(HStoreKey other) {
-    if((this.row.compareTo(other.row) == 0) &&
-       (this.timestamp >= other.getTimestamp())) {
+    if ((this.row.compareTo(other.row) == 0) &&
+        (this.timestamp >= other.getTimestamp())) {
       return true;
       
     } else {
@@ -124,14 +124,14 @@
   public int compareTo(Object o) {
     HStoreKey other = (HStoreKey) o;
     int result = this.row.compareTo(other.row);
-    if(result == 0) {
+    if (result == 0) {
       result = this.column.compareTo(other.column);
       
-      if(result == 0) {
-        if(this.timestamp < other.timestamp) {
+      if (result == 0) {
+        if (this.timestamp < other.timestamp) {
           result = 1;
           
-        } else if(this.timestamp > other.timestamp) {
+        } else if (this.timestamp > other.timestamp) {
           result = -1;
         }
       }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Thu Apr 19 14:34:41 2007
@@ -54,7 +54,7 @@
 
   /** Do we contain a given column? */
   public boolean hasFamily(Text family) {
-    if(families.contains(family)) {
+    if (families.contains(family)) {
       return true;
       
     } else {
@@ -75,7 +75,7 @@
     name.write(out);
     out.writeInt(maxVersions);
     out.writeInt(families.size());
-    for(Iterator<Text> it = families.iterator(); it.hasNext(); ) {
+    for(Iterator<Text> it = families.iterator(); it.hasNext();) {
       it.next().write(out);
     }
   }
@@ -99,21 +99,21 @@
   public int compareTo(Object o) {
     HTableDescriptor htd = (HTableDescriptor) o;
     int result = name.compareTo(htd.name);
-    if(result == 0) {
+    if (result == 0) {
       result = maxVersions - htd.maxVersions;
     }
     
-    if(result == 0) {
+    if (result == 0) {
       result = families.size() - htd.families.size();
     }
     
-    if(result == 0) {
+    if (result == 0) {
       Iterator<Text> it2 = htd.families.iterator();
-      for(Iterator<Text> it = families.iterator(); it.hasNext(); ) {
+      for(Iterator<Text> it = families.iterator(); it.hasNext();) {
         Text family1 = it.next();
         Text family2 = it2.next();
         result = family1.compareTo(family2);
-        if(result != 0) {
+        if (result != 0) {
           return result;
         }
       }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java Thu Apr 19 14:34:41 2007
@@ -77,7 +77,7 @@
       synchronized(sortedLeases) {
         Lease lease = new Lease(holderId, resourceId, listener);
         Text leaseId = lease.getLeaseId();
-        if(leases.get(leaseId) != null) {
+        if (leases.get(leaseId) != null) {
           throw new IOException("Impossible state for createLease(): Lease for holderId " + holderId + " and resourceId " + resourceId + " is still held.");
         }
         leases.put(leaseId, lease);
@@ -92,7 +92,7 @@
       synchronized(sortedLeases) {
         Text leaseId = createLeaseId(holderId, resourceId);
         Lease lease = leases.get(leaseId);
-        if(lease == null) {
+        if (lease == null) {
           
           // It's possible that someone tries to renew the lease, but 
           // it just expired a moment ago.  So fail.
@@ -113,7 +113,7 @@
       synchronized(sortedLeases) {
         Text leaseId = createLeaseId(holderId, resourceId);
         Lease lease = leases.get(leaseId);
-        if(lease == null) {
+        if (lease == null) {
           
           // It's possible that someone tries to renew the lease, but 
           // it just expired a moment ago.  So fail.
@@ -139,7 +139,7 @@
             while((sortedLeases.size() > 0)
                   && ((top = sortedLeases.first()) != null)) {
               
-              if(top.shouldExpire()) {
+              if (top.shouldExpire()) {
                 leases.remove(top.getLeaseId());
                 sortedLeases.remove(top);
 
@@ -205,10 +205,10 @@
 
     public int compareTo(Object o) {
       Lease other = (Lease) o;
-      if(this.lastUpdate < other.lastUpdate) {
+      if (this.lastUpdate < other.lastUpdate) {
         return -1;
         
-      } else if(this.lastUpdate > other.lastUpdate) {
+      } else if (this.lastUpdate > other.lastUpdate) {
         return 1;
         
       } else {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/Environment.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/Environment.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/Environment.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/Environment.java Thu Apr 19 14:34:41 2007
@@ -29,27 +29,27 @@
     String value = null;
     
     value = System.getenv("DEBUGGING");
-    if(value != null && value.equalsIgnoreCase("TRUE")) {
+    if (value != null && value.equalsIgnoreCase("TRUE")) {
       debugging = true;
     }
     
     value = System.getenv("LOGGING_LEVEL");
-    if(value != null && value.length() != 0) {
-      if(value.equalsIgnoreCase("ALL")) {
+    if (value != null && value.length() != 0) {
+      if (value.equalsIgnoreCase("ALL")) {
         logLevel = Level.ALL;
-      } else if(value.equalsIgnoreCase("DEBUG")) {
+      } else if (value.equalsIgnoreCase("DEBUG")) {
         logLevel = Level.DEBUG;
-      } else if(value.equalsIgnoreCase("ERROR")) {
+      } else if (value.equalsIgnoreCase("ERROR")) {
         logLevel = Level.ERROR;
-      } else if(value.equalsIgnoreCase("FATAL")) {
+      } else if (value.equalsIgnoreCase("FATAL")) {
         logLevel = Level.FATAL;
-      } else if(value.equalsIgnoreCase("INFO")) {
+      } else if (value.equalsIgnoreCase("INFO")) {
         logLevel = Level.INFO;
-      } else if(value.equalsIgnoreCase("OFF")) {
+      } else if (value.equalsIgnoreCase("OFF")) {
         logLevel = Level.OFF;
-      } else if(value.equalsIgnoreCase("TRACE")) {
+      } else if (value.equalsIgnoreCase("TRACE")) {
         logLevel = Level.TRACE;
-      } else if(value.equalsIgnoreCase("WARN")) {
+      } else if (value.equalsIgnoreCase("WARN")) {
         logLevel = Level.WARN;
       }
     }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java Thu Apr 19 14:34:41 2007
@@ -90,7 +90,7 @@
   
   public void testSetup() throws IOException {
     try {
-      if(System.getProperty("test.build.data") == null) {
+      if (System.getProperty("test.build.data") == null) {
         String dir = new File(new File("").getAbsolutePath(), "build/contrib/hbase/test").getAbsolutePath();
         System.out.println(dir);
         System.setProperty("test.build.data", dir);
@@ -98,7 +98,7 @@
       conf = new Configuration();
       
       Environment.getenv();
-      if(Environment.debugging) {
+      if (Environment.debugging) {
         Logger rootLogger = Logger.getRootLogger();
         rootLogger.setLevel(Level.WARN);
         
@@ -133,7 +133,7 @@
   // Test basic functionality. Writes to contents:basic and anchor:anchornum-*
 
   public void testBasic() throws IOException {
-    if(!initialized) {
+    if (!initialized) {
       throw new IllegalStateException();
     }
 
@@ -191,7 +191,7 @@
   // Test scanners. Writes contents:firstcol and anchor:secondcol
   
   public void testScan() throws IOException {
-    if(!initialized) {
+    if (!initialized) {
       throw new IllegalStateException();
     }
 
@@ -225,13 +225,13 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
 
           for(int j = 0; j < cols.length; j++) {
-            if(col.compareTo(cols[j]) == 0) {
+            if (col.compareTo(cols[j]) == 0) {
               assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                            + ", Value for " + col + " should be: " + k
                            + ", but was fetched as: " + curval, k, curval);
@@ -258,13 +258,13 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
 
           for(int j = 0; j < cols.length; j++) {
-            if(col.compareTo(cols[j]) == 0) {
+            if (col.compareTo(cols[j]) == 0) {
               assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                            + ", Value for " + col + " should be: " + k
                            + ", but was fetched as: " + curval, k, curval);
@@ -299,13 +299,13 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
 
           for(int j = 0; j < cols.length; j++) {
-            if(col.compareTo(cols[j]) == 0) {
+            if (col.compareTo(cols[j]) == 0) {
               assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                            + ", Value for " + col + " should be: " + k
                            + ", but was fetched as: " + curval, k, curval);
@@ -332,7 +332,7 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
@@ -362,7 +362,7 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 500;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
@@ -390,10 +390,10 @@
   // Creates contents:body
   
   public void testBatchWrite() throws IOException {
-    if(!initialized || failures) {
+    if (!initialized || failures) {
       throw new IllegalStateException();
     }
-    if(! Environment.debugging) {
+    if (!Environment.debugging) {
       return;
     }
 
@@ -437,7 +437,7 @@
         }
       }
       long startCompact = System.currentTimeMillis();
-      if(region.compactStores()) {
+      if (region.compactStores()) {
         totalCompact = System.currentTimeMillis() - startCompact;
         System.out.println("Region compacted - elapsedTime: " + (totalCompact / 1000.0));
         
@@ -467,14 +467,14 @@
   // NOTE: This test depends on testBatchWrite succeeding
   
   public void testSplitAndMerge() throws IOException {
-    if(!initialized || failures) {
+    if (!initialized || failures) {
       throw new IllegalStateException();
     }
     
     try {
       Text midKey = new Text();
       
-      if(region.needsSplit(midKey)) {
+      if (region.needsSplit(midKey)) {
         System.out.println("Needs split");
       }
       
@@ -504,7 +504,7 @@
   // This test verifies that everything is still there after splitting and merging
   
   public void testRead() throws IOException {
-    if(!initialized || failures) {
+    if (!initialized || failures) {
       throw new IllegalStateException();
     }
 
@@ -525,19 +525,19 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           String curval = new String(val).trim();
 
-          if(col.compareTo(CONTENTS_BASIC) == 0) {
+          if (col.compareTo(CONTENTS_BASIC) == 0) {
             assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                        + ", Value for " + col + " should start with: " + CONTENTSTR
                        + ", but was fetched as: " + curval,
                        curval.startsWith(CONTENTSTR));
             contentsFetched++;
             
-          } else if(col.toString().startsWith(ANCHORNUM)) {
+          } else if (col.toString().startsWith(ANCHORNUM)) {
             assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                        + ", Value for " + col + " should start with: " + ANCHORSTR
                        + ", but was fetched as: " + curval,
@@ -572,7 +572,7 @@
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       int k = 0;
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           Text col = it.next();
           byte val[] = curVals.get(col);
           int curval = Integer.parseInt(new String(val).trim());
@@ -596,7 +596,7 @@
     
     // Verify testBatchWrite data
 
-    if(Environment.debugging) {
+    if (Environment.debugging) {
       s = region.getScanner(new Text[] { CONTENTS_BODY }, new Text());
       try {
         int numFetched = 0;
@@ -604,7 +604,7 @@
         TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
         int k = 0;
         while(s.next(curKey, curVals)) {
-          for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+          for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
             Text col = it.next();
             byte val[] = curVals.get(col);
 
@@ -635,7 +635,7 @@
       HStoreKey curKey = new HStoreKey();
       TreeMap<Text, byte[]> curVals = new TreeMap<Text, byte[]>();
       while(s.next(curKey, curVals)) {
-        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext();) {
           it.next();
           fetched++;
         }
@@ -650,7 +650,7 @@
 
   
   private static void deleteFile(File f) {
-    if(f.isDirectory()) {
+    if (f.isDirectory()) {
       File[] children = f.listFiles();
       for(int i = 0; i < children.length; i++) {
         deleteFile(children[i]);
@@ -660,7 +660,7 @@
   }
   
   public void testCleanup() throws IOException {
-    if(!initialized) {
+    if (!initialized) {
       throw new IllegalStateException();
     }
 

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/CompoundDirSpec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/CompoundDirSpec.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/CompoundDirSpec.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/CompoundDirSpec.java Thu Apr 19 14:34:41 2007
@@ -219,7 +219,7 @@
   public static String expandGlobInputSpec(String inputSpec, JobConf job)
   {
     inputSpec = inputSpec.trim();
-    if(!inputSpec.startsWith(MERGEGLOB_PREFIX)) {
+    if (!inputSpec.startsWith(MERGEGLOB_PREFIX)) {
       return inputSpec;
     }
     inputSpec = inputSpec.substring(MERGEGLOB_PREFIX.length());

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java Thu Apr 19 14:34:41 2007
@@ -115,7 +115,7 @@
     JarEntry entry = null;
     while (entries.hasMoreElements()) {
       entry = (JarEntry) entries.nextElement();
-      //if(entry.getName().startsWith("META-INF/")) continue; 
+      //if (entry.getName().startsWith("META-INF/")) continue; 
       InputStream in = src.getInputStream(entry);
       addNamedStream(dst, entry.getName(), in);
     }

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/MergerInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/MergerInputFormat.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/MergerInputFormat.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/MergerInputFormat.java Thu Apr 19 14:34:41 2007
@@ -82,7 +82,7 @@
   /** Delegate to the primary InputFormat. 
       Force full-file splits since there's no index to sync secondaries.
       (and if there was, this index may need to be created for the first time
-      full file at a time...    )
+      full file at a time...   )
   */
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     return ((StreamInputFormat) primary_).getSplits(job, numSplits);
@@ -121,7 +121,7 @@
   /*
     private FileSplit relatedSplit(FileSplit primarySplit, int i, CompoundDirSpec spec) throws IOException
     {
-    if(i == 0) {
+    if (i == 0) {
     return primarySplit;
     }
 
@@ -330,7 +330,7 @@
   Writable v_;
 
   public MergeRecordStream(int index, RecordReader reader, WritableComparable k, Writable v)
-      throws IOException {
+    throws IOException {
     index_ = index;
     reader_ = reader;
     k_ = k;

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java Thu Apr 19 14:34:41 2007
@@ -261,7 +261,7 @@
           finalOutputURI = new URI(sideEffectPathFinal_.toString()); // implicit dfs: 
         }
         // apply default scheme
-        if(finalOutputURI.getScheme() == null) {
+        if (finalOutputURI.getScheme() == null) {
           finalOutputURI = new URI("file", finalOutputURI.getSchemeSpecificPart(), null);
         }
         boolean allowSocket = useSingleSideOutputURI_;
@@ -579,7 +579,7 @@
           logprintln("closing " + finalOutputURI);
           if (sideEffectOut_ != null) sideEffectOut_.close();
           logprintln("closed  " + finalOutputURI);
-          if ( ! useSingleSideOutputURI_) {
+          if (!useSingleSideOutputURI_) {
             ((PhasedFileSystem)sideFs_).commit(); 
           }
         }

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java Thu Apr 19 14:34:41 2007
@@ -70,7 +70,7 @@
     if (outThread_ == null) {
       startOutputThreads(output, reporter);
     }
-    if( outerrThreadsThrowable != null ) {
+    if (outerrThreadsThrowable != null) {
       mapRedFinished();
       throw new IOException ("MROutput/MRErrThread failed:"
                              + StringUtils.stringifyException(

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Thu Apr 19 14:34:41 2007
@@ -74,10 +74,10 @@
         numRecRead_++;
         maybeLogRecord();
         if (doPipe_) {
-          if( outerrThreadsThrowable != null ) {
+          if (outerrThreadsThrowable != null) {
             mapRedFinished();
             throw new IOException ("MROutput/MRErrThread failed:"
-                                   + StringUtils.stringifyException( 
+                                   + StringUtils.stringifyException(
                                                                     outerrThreadsThrowable));
           }
           write(key);

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Thu Apr 19 14:34:41 2007
@@ -82,7 +82,7 @@
     new DefaultOptionBuilder("-","-", false);
   private ArgumentBuilder argBuilder = new ArgumentBuilder(); 
   private Parser parser = new Parser(); 
-  private Group allOptions ; 
+  private Group allOptions; 
   HelpFormatter helpFormatter = new HelpFormatter("  ", "  ", "  ", 900);
   // need these two at class level to extract values later from 
   // commons-cli command line
@@ -197,7 +197,7 @@
   }
 
   void parseArgv(){
-    CommandLine cmdLine = null ; 
+    CommandLine cmdLine = null; 
     try{
       cmdLine = parser.parse(argv_);
     }catch(Exception oe){
@@ -209,10 +209,10 @@
       }
     }
     
-    if( cmdLine != null ){
-      verbose_ =  cmdLine.hasOption("-verbose") ;
-      detailedUsage_ = cmdLine.hasOption("-info") ;
-      debug_ = cmdLine.hasOption("-debug")? debug_ + 1 : debug_ ;
+    if (cmdLine != null){
+      verbose_ =  cmdLine.hasOption("-verbose");
+      detailedUsage_ = cmdLine.hasOption("-info");
+      debug_ = cmdLine.hasOption("-debug")? debug_ + 1 : debug_;
       inputTagged_ = cmdLine.hasOption("-inputtagged"); 
       
       inputSpecs_.addAll(cmdLine.getValues("-input"));
@@ -230,12 +230,12 @@
       configPath_.addAll(cmdLine.getValues("-config"));
       
       String fsName = (String)cmdLine.getValue("-dfs");
-      if( null != fsName ){
+      if (null != fsName){
         userJobConfProps_.put("fs.default.name", fsName);        
       }
       
       String jt = (String)cmdLine.getValue("mapred.job.tracker");
-      if( null != jt ){
+      if (null != jt){
         userJobConfProps_.put("fs.default.name", jt);        
       }
       
@@ -246,15 +246,15 @@
       inReaderSpec_ = (String)cmdLine.getValue("-inputreader"); 
       
       List<String> car = cmdLine.getValues("-cacheArchive"); 
-      if( null != car ){
-        for( String s : car ){
+      if (null != car){
+        for(String s : car){
           cacheArchives = (cacheArchives == null)?s :cacheArchives + "," + s;  
         }
       }
 
       List<String> caf = cmdLine.getValues("-cacheFile"); 
-      if( null != caf ){
-        for( String s : caf ){
+      if (null != caf){
+        for(String s : caf){
           cacheFiles = (cacheFiles == null)?s :cacheFiles + "," + s;  
         }
       }
@@ -262,14 +262,14 @@
       List<String> jobConfArgs = (List<String>)cmdLine.getValue(jobconf); 
       List<String> envArgs = (List<String>)cmdLine.getValue(cmdenv); 
       
-      if( null != jobConfArgs ){
-        for( String s : jobConfArgs){
+      if (null != jobConfArgs){
+        for(String s : jobConfArgs){
           String []parts = s.split("="); 
           userJobConfProps_.put(parts[0], parts[1]);
         }
       }
-      if( null != envArgs ){
-        for( String s : envArgs ){
+      if (null != envArgs){
+        for(String s : envArgs){
           if (addTaskEnvironment_.length() > 0) {
             addTaskEnvironment_ += " ";
           }
@@ -310,7 +310,7 @@
       withMinimum(1).
       withMaximum(max).
       withValidator(validator).
-      create() ;
+      create();
    
     return builder.
       withLongName(name).
@@ -332,15 +332,15 @@
           // an can exec check in java 6
           for (String file : (List<String>)values) {
             File f = new File(file);  
-            if ( ! f.exists() ) {
+            if (!f.exists()) {
               throw new InvalidArgumentException("Argument : " + 
                                                  f.getAbsolutePath() + " doesn't exist."); 
             }
-            if ( ! f.isFile() ) {
+            if (!f.isFile()) {
               throw new InvalidArgumentException("Argument : " + 
                                                  f.getAbsolutePath() + " is not a file."); 
             }
-            if ( ! f.canRead() ) {
+            if (!f.canRead()) {
               throw new InvalidArgumentException("Argument : " + 
                                                  f.getAbsolutePath() + " is not accessible"); 
             }
@@ -378,7 +378,7 @@
     Option mapper  = createOption("mapper", 
                                   "The streaming command to run", "cmd", 1, false);
     Option combiner = createOption("combiner", 
-                                   "The streaming command to run", "cmd",1, false);
+                                   "The streaming command to run", "cmd", 1, false);
     // reducer could be NONE 
     Option reducer = createOption("reducer", 
                                   "The streaming command to run", "cmd", 1, false); 
@@ -388,21 +388,21 @@
     Option dfs = createOption("dfs", 
                               "Optional. Override DFS configuration", "<h:p>|local", 1, false); 
     Option jt = createOption("jt", 
-                             "Optional. Override JobTracker configuration", "<h:p>|local",1, false);
+                             "Optional. Override JobTracker configuration", "<h:p>|local", 1, false);
     Option additionalconfspec = createOption("additionalconfspec", 
-                                             "Optional.", "spec",1, false );
+                                             "Optional.", "spec", 1, false);
     Option inputformat = createOption("inputformat", 
-                                      "Optional.", "spec",1, false );
+                                      "Optional.", "spec", 1, false);
     Option outputformat = createOption("outputformat", 
-                                       "Optional.", "spec",1, false );
+                                       "Optional.", "spec", 1, false);
     Option partitioner = createOption("partitioner", 
-                                      "Optional.", "spec",1, false );
+                                      "Optional.", "spec", 1, false);
     Option inputreader = createOption("inputreader", 
-                                      "Optional.", "spec",1, false );
+                                      "Optional.", "spec", 1, false);
     Option cacheFile = createOption("cacheFile", 
                                     "File name URI", "fileNameURI", 1, false);
     Option cacheArchive = createOption("cacheArchive", 
-                                       "File name URI", "fileNameURI",1, false);
+                                       "File name URI", "fileNameURI", 1, false);
     
     // boolean properties
     
@@ -844,7 +844,7 @@
     if (cacheFiles != null)
       DistributedCache.setCacheFiles(fileURIs, jobConf_);
     
-    if(verbose_) {
+    if (verbose_) {
       listJobConfProperties();
     }
    
@@ -956,7 +956,7 @@
       LOG.info("To kill this job, run:");
       LOG.info(getHadoopClientHome() + "/bin/hadoop job  -Dmapred.job.tracker=" + hp + " -kill "
                + jobId_);
-      //LOG.info("Job file: " + running_.getJobFile() );
+      //LOG.info("Job file: " + running_.getJobFile());
       LOG.info("Tracking URL: " + StreamUtil.qualifyHost(running_.getTrackingURL()));
     }
   }
@@ -1012,7 +1012,7 @@
     }catch(FileAlreadyExistsException fae){
       LOG.error("Error launching job , Output path already exists : " 
                 + fae.getMessage());
-    }catch( IOException ioe){
+    }catch(IOException ioe){
       LOG.error("Error Launching job : " + ioe.getMessage());
     }
     finally {
@@ -1025,7 +1025,7 @@
   }
   /** Support -jobconf x=y x1=y1 type options **/
   class MultiPropertyOption extends PropertyOption{
-    private String optionString ; 
+    private String optionString; 
     MultiPropertyOption(){
       super(); 
     }
@@ -1033,7 +1033,7 @@
     MultiPropertyOption(final String optionString,
                         final String description,
                         final int id){
-      super(optionString, description, id) ; 
+      super(optionString, description, id); 
       this.optionString = optionString;
     }
 
@@ -1053,10 +1053,10 @@
       }
       
       ArrayList properties = new ArrayList(); 
-      String next = "" ; 
-      while( arguments.hasNext()){
+      String next = ""; 
+      while(arguments.hasNext()){
         next = (String) arguments.next();
-        if( ! next.startsWith("-") ){
+        if (!next.startsWith("-")){
           properties.add(next);
         }else{
           arguments.previous();
@@ -1064,9 +1064,9 @@
         }
       } 
 
-      // add to any existing values ( support specifying args multiple times)
-      List<String> oldVal = (List<String>)commandLine.getValue(this) ; 
-      if( oldVal == null ){
+      // add to any existing values (support specifying args multiple times)
+      List<String> oldVal = (List<String>)commandLine.getValue(this); 
+      if (oldVal == null){
         commandLine.addValue(this, properties);
       }else{
         oldVal.addAll(properties); 
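
For reference, the -jobconf handling changed above relies on MultiPropertyOption to gather every bare token after the flag up to the next "-" option, and then splits each token on '='. A minimal standalone sketch of that parsing, using illustrative property names rather than anything taken from the patch:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class JobConfArgsSketch {
      public static void main(String[] args) {
        // MultiPropertyOption gathers every bare token after -jobconf up to the
        // next "-" flag, so "-jobconf mapred.reduce.tasks=2 stream.verbose=true"
        // arrives here as two strings.
        List<String> jobConfArgs =
          Arrays.asList("mapred.reduce.tasks=2", "stream.verbose=true");

        // StreamJob then splits each string on '=' and records it as a job property.
        Map<String, String> userJobConfProps = new LinkedHashMap<String, String>();
        for (String s : jobConfArgs) {
          String[] parts = s.split("=");
          userJobConfProps.put(parts[0], parts[1]);
        }
        System.out.println(userJobConfProps);
        // {mapred.reduce.tasks=2, stream.verbose=true}
      }
    }

Note that split("=") keeps only the token before the second '=', so as committed a value that itself contains '=' is cut short.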

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java Thu Apr 19 14:34:41 2007
@@ -494,10 +494,10 @@
   public static String getBoundAntProperty(String name, String defaultVal)
   {
     String val = System.getProperty(name);
-    if(val != null && val.indexOf("${") >= 0) {
+    if (val != null && val.indexOf("${") >= 0) {
       val = null;
     }
-    if(val == null) {
+    if (val == null) {
       val = defaultVal;
     }
     return val;
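
The getBoundAntProperty() change above is whitespace-only, but the method's contract is easy to miss: a system property whose value still contains an unexpanded "${...}" placeholder is treated the same as an unset property. A small sketch of that behavior, with an illustrative property name and default:

    public class BoundAntPropertySketch {
      // Mirrors StreamUtil.getBoundAntProperty(): a value that still contains an
      // unexpanded "${" placeholder is treated exactly like an unset property.
      static String getBoundAntProperty(String name, String defaultVal) {
        String val = System.getProperty(name);
        if (val != null && val.indexOf("${") >= 0) {
          val = null;
        }
        if (val == null) {
          val = defaultVal;
        }
        return val;
      }

      public static void main(String[] args) {
        // Simulate Ant passing the property through without expanding it.
        System.setProperty("fs.default.name", "${fs.default.name}");
        System.out.println(getBoundAntProperty("fs.default.name", "local"));  // local
      }
    }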

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java Thu Apr 19 14:34:41 2007
@@ -101,7 +101,7 @@
     ((Text) key).set(record);
     ((Text) value).set("");
 
-    /*if(numNext < 5) {
+    /*if (numNext < 5) {
       System.out.println("@@@ " + numNext + ". true next k=|" + key.toString().replaceAll("[\\r\\n]", " ")
       + "|, len=" + buf.length() + " v=|" + value.toString().replaceAll("[\\r\\n]", " ") + "|");
       }*/

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/UTF8ByteArrayUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/UTF8ByteArrayUtils.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/UTF8ByteArrayUtils.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/UTF8ByteArrayUtils.java Thu Apr 19 14:34:41 2007
@@ -39,7 +39,7 @@
    */
   public static int findTab(byte [] utf, int start, int length) {
     for(int i=start; i<(start+length); i++) {
-      if(utf[i]==(byte)'\t') {
+      if (utf[i]==(byte)'\t') {
         return i;
       }
     }
@@ -68,9 +68,9 @@
    */
   public static void splitKeyVal(byte[] utf, int start, int length, 
                                  Text key, Text val, int splitPos) throws IOException {
-    if(splitPos<start || splitPos >= (start+length))
-      throw new IllegalArgumentException( "splitPos must be in the range " +
-                                          "[" + start + ", " + (start+length) + "]: " + splitPos);
+    if (splitPos<start || splitPos >= (start+length))
+      throw new IllegalArgumentException("splitPos must be in the range " +
+                                         "[" + start + ", " + (start+length) + "]: " + splitPos);
     int keyLen = (splitPos-start);
     byte [] keyBytes = new byte[keyLen];
     System.arraycopy(utf, start, keyBytes, 0, keyLen);
@@ -122,7 +122,7 @@
       if (c == '\r') {
         in.mark(1);
         int c2 = in.read();
-        if(c2 == -1) {
+        if (c2 == -1) {
           isEOF = true;
           break;
         }
@@ -142,7 +142,7 @@
       buf[offset++] = (byte) c;
     }
 
-    if(isEOF && offset==0) {
+    if (isEOF && offset==0) {
       return null;
     } else {
       lineBuffer = new byte[offset];
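
findTab() and splitKeyVal() touched above are the byte-level helpers streaming uses to cut a line into key and value at the first tab. A usage sketch, assuming the usual streaming convention that the separator byte ends up in neither side and that findTab() reports a negative index when no tab is present:

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.streaming.UTF8ByteArrayUtils;

    public class SplitKeyValSketch {
      public static void main(String[] args) throws IOException {
        byte[] line = "key1\tvalue1".getBytes("UTF-8");
        Text key = new Text();
        Text val = new Text();

        // Locate the first tab in the buffer.
        int tab = UTF8ByteArrayUtils.findTab(line, 0, line.length);
        if (tab >= 0) {
          // Copy the bytes before the tab into key and the remainder into val.
          UTF8ByteArrayUtils.splitKeyVal(line, 0, line.length, key, val, tab);
        }
        System.out.println(key + " / " + val);   // key1 / value1
      }
    }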

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java Thu Apr 19 14:34:41 2007
@@ -217,7 +217,7 @@
     String overrideFS = StreamUtil.getBoundAntProperty("fs.default.name", null);
     MiniDFSCluster cluster = null;
     try {
-      if(overrideFS == null) {
+      if (overrideFS == null) {
         cluster = new MiniDFSCluster(conf_, 1, true, null);
         fs_ = cluster.getFileSystem();
       } else {
@@ -265,7 +265,7 @@
     } else {
       String userOut = StreamUtil.getBoundAntProperty(
                                                       "hadoop.test.localoutputfile", null);
-      if(userOut != null) {
+      if (userOut != null) {
         f = new File(userOut);
         // don't delete so they can mkfifo
         maybeFifoOutput_ = true;
@@ -275,7 +275,7 @@
         maybeFifoOutput_ = false;
       }
       String s = new Path(f.getAbsolutePath()).toString();
-      if(! s.startsWith("/")) {
+      if (!s.startsWith("/")) {
         s = "/" + s; // Windows "file:/C:/"
       }
       sideOutput = "file:" + s;
@@ -292,7 +292,7 @@
       }
       output = outputBuf.toString();
     } else {
-      if(maybeFifoOutput_) {
+      if (maybeFifoOutput_) {
         System.out.println("assertEquals will fail.");
         output = "potential FIFO: not retrieving to avoid blocking on open() "
           + f.getAbsoluteFile();

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java Thu Apr 19 14:34:41 2007
@@ -68,7 +68,7 @@
   void expect(String evName, String evVal) throws IOException
   {
     String got = env.getProperty(evName);
-    if(! evVal.equals(got)) {
+    if (!evVal.equals(got)) {
       String msg = "FAIL evName=" + evName + " got=" + got + " expect=" + evVal;
       throw new IOException(msg);
     }
@@ -77,7 +77,7 @@
   void expectDefined(String evName) throws IOException
   {
     String got = env.getProperty(evName);
-    if(got == null) {
+    if (got == null) {
       String msg = "FAIL evName=" + evName + " is undefined. Expect defined.";
       throw new IOException(msg);
     }
@@ -105,7 +105,7 @@
 
   public static String CUnescape(String s)
   {
-    if(s.equals("\\n")) {
+    if (s.equals("\\n")) {
       return "\n";
     } else {
       return s;

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UniqApp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UniqApp.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UniqApp.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UniqApp.java Thu Apr 19 14:34:41 2007
@@ -39,7 +39,7 @@
     String line;
     String prevLine = null;
     while ((line = in.readLine()) != null) {
-      if(! line.equals(prevLine)) {
+      if (!line.equals(prevLine)) {
         System.out.println(header + line);
       }
       prevLine = line;

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java Thu Apr 19 14:34:41 2007
@@ -34,7 +34,7 @@
 
   void checkUserDir() {
     // trunk/src/contrib/streaming --> trunk/build/contrib/streaming/test/data
-    if(! userDir_.equals(antTestDir_)) {
+    if (!userDir_.equals(antTestDir_)) {
       // because changes to user.dir are ignored by File static methods.
       throw new IllegalStateException("user.dir != test.build.data. The junit Ant task must be forked.");
     }
@@ -43,7 +43,7 @@
   void redirectIfAntJunit() throws IOException
   {
     boolean fromAntJunit = System.getProperty("test.build.data") != null;
-    if(fromAntJunit) {
+    if (fromAntJunit) {
       new File(antTestDir_).mkdirs();
       File outFile = new File(antTestDir_, testName_+".log");
       PrintStream out = new PrintStream(new FileOutputStream(outFile));

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Thu Apr 19 14:34:41 2007
@@ -107,7 +107,7 @@
     this.finalResources = (ArrayList)other.finalResources.clone();
     if (other.properties != null)
       this.properties = (Properties)other.properties.clone();
-    if(other.overlay!=null)
+    if (other.overlay!=null)
       this.overlay = (Properties)other.overlay.clone();
   }
 
@@ -142,7 +142,7 @@
   }
 
   private synchronized void addResource(ArrayList<Object> resources,
-      Object resource) {
+                                        Object resource) {
     
     resources.add(resource);                      // add to resources
     properties = null;                            // trigger reload
@@ -172,23 +172,23 @@
   private static int MAX_SUBST = 20;
 
   private String substituteVars(String expr) {
-    if(expr == null) {
+    if (expr == null) {
       return null;
     }
     Matcher match = varPat.matcher("");
     String eval = expr;
     for(int s=0; s<MAX_SUBST; s++) {
       match.reset(eval);
-      if(! match.find()) {
+      if (!match.find()) {
         return eval;
       }
       String var = match.group();
       var = var.substring(2, var.length()-1); // remove ${ .. }
       String val = System.getProperty(var);
-      if(val == null) {
+      if (val == null) {
         val = (String)this.getObject(var);
       }
-      if(val == null) {
+      if (val == null) {
         return eval; // return literal ${var}: var is unbound
       }
       // substitute
@@ -211,7 +211,7 @@
   }
   
   private synchronized Properties getOverlay() {
-    if(overlay==null){
+    if (overlay==null){
       overlay=new Properties();
     }
     return overlay;
@@ -221,7 +221,7 @@
    * exists, then <code>defaultValue</code> is returned.
    */
   public String get(String name, String defaultValue) {
-     return substituteVars(getProps().getProperty(name, defaultValue));
+    return substituteVars(getProps().getProperty(name, defaultValue));
   }
     
   /** Returns the value of the <code>name</code> property as an integer.  If no
@@ -338,7 +338,7 @@
    * interface. 
    */
   public Class<?> getClass(String propertyName, Class<?> defaultValue,
-      Class<?> xface) {
+                           Class<?> xface) {
     
     try {
       Class<?> theClass = getClass(propertyName, defaultValue);
@@ -354,7 +354,7 @@
    * First checks that the class implements the named interface. 
    */
   public void setClass(String propertyName, Class<?> theClass,
-      Class<?> xface) {
+                       Class<?> xface) {
     
     if (!xface.isAssignableFrom(theClass))
       throw new RuntimeException(theClass+" not "+xface.getName());
@@ -380,7 +380,7 @@
       }
     }
     LOG.warn("Could not make " + path + 
-                " in local directories from " + dirsProp);
+             " in local directories from " + dirsProp);
     for(int i=0; i < dirs.length; i++) {
       int index = (hashCode+i & Integer.MAX_VALUE) % dirs.length;
       LOG.warn(dirsProp + "[" + index + "]=" + dirs[index]);
@@ -460,7 +460,7 @@
       loadResources(newProps, defaultResources, false, quietmode);
       loadResources(newProps, finalResources, true, true);
       properties = newProps;
-      if(overlay!=null)
+      if (overlay!=null)
         properties.putAll(overlay);
     }
     return properties;
@@ -575,7 +575,7 @@
         String name = (String)e.nextElement();
         Object object = properties.get(name);
         String value = null;
-        if(object instanceof String) {
+        if (object instanceof String) {
           value = (String) object;
         }else {
           continue;
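
substituteVars(), reworked above, is what lets configuration values embed ${...} references: get() expands them first from Java system properties, then from other configuration entries, for at most MAX_SUBST (20) rounds, and returns unbound references literally. A small usage sketch; the property name and value are illustrative, not taken from the patch:

    import org.apache.hadoop.conf.Configuration;

    public class SubstituteVarsSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // ${user.name} is a standard Java system property, so get() expands it
        // via substituteVars(); an unbound ${var} would come back literally.
        conf.set("hadoop.tmp.dir", "/tmp/hadoop-${user.name}");
        System.out.println(conf.get("hadoop.tmp.dir", null));   // e.g. /tmp/hadoop-alice
      }
    }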

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/Block.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/Block.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/Block.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/Block.java Thu Apr 19 14:34:41 2007
@@ -112,7 +112,7 @@
   public void readFields(DataInput in) throws IOException {
     this.blkid = in.readLong();
     this.len = in.readLong();
-    if( len < 0 ) {
+    if (len < 0) {
       throw new IOException("Unexpected block size: " + len);
     }
   }
@@ -122,9 +122,9 @@
   /////////////////////////////////////
   public int compareTo(Object o) {
     Block b = (Block) o;
-    if ( blkid < b.blkid ) {
+    if (blkid < b.blkid) {
       return -1;
-    } else if ( blkid == b.blkid ) {
+    } else if (blkid == b.blkid) {
       return 0;
     } else {
       return 1;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/BlockCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/BlockCommand.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/BlockCommand.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/BlockCommand.java Thu Apr 19 14:34:41 2007
@@ -24,10 +24,10 @@
   DatanodeProtocol.DataNodeAction action;
   
   public DatanodeCommand() {
-    this( DatanodeProtocol.DataNodeAction.DNA_UNKNOWN );
+    this(DatanodeProtocol.DataNodeAction.DNA_UNKNOWN);
   }
   
-  public DatanodeCommand( DatanodeProtocol.DataNodeAction action ) {
+  public DatanodeCommand(DatanodeProtocol.DataNodeAction action) {
     this.action = action;
   }
 
@@ -43,12 +43,12 @@
   }
 
   public void write(DataOutput out) throws IOException {
-    WritableUtils.writeEnum( out, action );
+    WritableUtils.writeEnum(out, action);
   }
   
   public void readFields(DataInput in) throws IOException {
     this.action = (DatanodeProtocol.DataNodeAction)
-      WritableUtils.readEnum( in, DatanodeProtocol.DataNodeAction.class );
+      WritableUtils.readEnum(in, DatanodeProtocol.DataNodeAction.class);
   }
 }
 
@@ -62,81 +62,81 @@
  * @author Mike Cafarella
  ****************************************************/
 class BlockCommand extends DatanodeCommand {
-    Block blocks[];
-    DatanodeInfo targets[][];
+  Block blocks[];
+  DatanodeInfo targets[][];
 
-    public BlockCommand() {}
+  public BlockCommand() {}
 
-    /**
-     * Create BlockCommand for transferring blocks to another datanode
-     * @param blocks    blocks to be transferred 
-     * @param targets   nodes to transfer
-     */
-    public BlockCommand(Block blocks[], DatanodeInfo targets[][]) {
-      super(  DatanodeProtocol.DataNodeAction.DNA_TRANSFER );
-      this.blocks = blocks;
-      this.targets = targets;
-    }
+  /**
+   * Create BlockCommand for transferring blocks to another datanode
+   * @param blocks    blocks to be transferred 
+   * @param targets   nodes to transfer
+   */
+  public BlockCommand(Block blocks[], DatanodeInfo targets[][]) {
+    super(DatanodeProtocol.DataNodeAction.DNA_TRANSFER);
+    this.blocks = blocks;
+    this.targets = targets;
+  }
 
-    /**
-     * Create BlockCommand for block invalidation
-     * @param blocks  blocks to invalidate
-     */
-    public BlockCommand(Block blocks[]) {
-      super( DatanodeProtocol.DataNodeAction.DNA_INVALIDATE );
-      this.blocks = blocks;
-      this.targets = new DatanodeInfo[0][];
-    }
+  /**
+   * Create BlockCommand for block invalidation
+   * @param blocks  blocks to invalidate
+   */
+  public BlockCommand(Block blocks[]) {
+    super(DatanodeProtocol.DataNodeAction.DNA_INVALIDATE);
+    this.blocks = blocks;
+    this.targets = new DatanodeInfo[0][];
+  }
 
-    public Block[] getBlocks() {
-        return blocks;
-    }
+  public Block[] getBlocks() {
+    return blocks;
+  }
 
-    public DatanodeInfo[][] getTargets() {
-        return targets;
-    }
+  public DatanodeInfo[][] getTargets() {
+    return targets;
+  }
+
+  ///////////////////////////////////////////
+  // Writable
+  ///////////////////////////////////////////
+  static {                                      // register a ctor
+    WritableFactories.setFactory
+      (BlockCommand.class,
+       new WritableFactory() {
+         public Writable newInstance() { return new BlockCommand(); }
+       });
+  }
 
-    ///////////////////////////////////////////
-    // Writable
-    ///////////////////////////////////////////
-    static {                                      // register a ctor
-      WritableFactories.setFactory
-        (BlockCommand.class,
-         new WritableFactory() {
-           public Writable newInstance() { return new BlockCommand(); }
-         });
+  public void write(DataOutput out) throws IOException {
+    super.write(out);
+    out.writeInt(blocks.length);
+    for (int i = 0; i < blocks.length; i++) {
+      blocks[i].write(out);
+    }
+    out.writeInt(targets.length);
+    for (int i = 0; i < targets.length; i++) {
+      out.writeInt(targets[i].length);
+      for (int j = 0; j < targets[i].length; j++) {
+        targets[i][j].write(out);
+      }
     }
+  }
 
-    public void write(DataOutput out) throws IOException {
-        super.write( out );
-        out.writeInt(blocks.length);
-        for (int i = 0; i < blocks.length; i++) {
-            blocks[i].write(out);
-        }
-        out.writeInt(targets.length);
-        for (int i = 0; i < targets.length; i++) {
-            out.writeInt(targets[i].length);
-            for (int j = 0; j < targets[i].length; j++) {
-                targets[i][j].write(out);
-            }
-        }
+  public void readFields(DataInput in) throws IOException {
+    super.readFields(in);
+    this.blocks = new Block[in.readInt()];
+    for (int i = 0; i < blocks.length; i++) {
+      blocks[i] = new Block();
+      blocks[i].readFields(in);
     }
 
-    public void readFields(DataInput in) throws IOException {
-        super.readFields( in );
-        this.blocks = new Block[in.readInt()];
-        for (int i = 0; i < blocks.length; i++) {
-            blocks[i] = new Block();
-            blocks[i].readFields(in);
-        }
-
-        this.targets = new DatanodeInfo[in.readInt()][];
-        for (int i = 0; i < targets.length; i++) {
-            this.targets[i] = new DatanodeInfo[in.readInt()];
-            for (int j = 0; j < targets[i].length; j++) {
-                targets[i][j] = new DatanodeInfo();
-                targets[i][j].readFields(in);
-            }
-        }
+    this.targets = new DatanodeInfo[in.readInt()][];
+    for (int i = 0; i < targets.length; i++) {
+      this.targets[i] = new DatanodeInfo[in.readInt()];
+      for (int j = 0; j < targets[i].length; j++) {
+        targets[i][j] = new DatanodeInfo();
+        targets[i][j].readFields(in);
+      }
     }
+  }
 }
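
The re-indented BlockCommand above is a plain Writable: write() emits the action enum, then the block list, then the jagged target matrix, and readFields() restores them in the same order. A round-trip sketch; the class name BlockCommandRoundTrip is made up, BlockCommand is package-private so the sketch assumes it is compiled into org.apache.hadoop.dfs, and the empty-array invalidate constructor is used only to keep the example short:

    package org.apache.hadoop.dfs;

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Hypothetical helper, placed alongside the real classes in this package.
    class BlockCommandRoundTrip {
      public static void main(String[] args) throws IOException {
        // The one-argument constructor builds a DNA_INVALIDATE command with an
        // empty target matrix; an empty block array keeps the example short.
        BlockCommand cmd = new BlockCommand(new Block[0]);

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        cmd.write(new DataOutputStream(bytes));   // action, block count, target count

        BlockCommand copy = new BlockCommand();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println("blocks=" + copy.getBlocks().length
                           + " targets=" + copy.getTargets().length);  // blocks=0 targets=0
      }
    }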

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java?view=diff&rev=530556&r1=530555&r2=530556
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java Thu Apr 19 14:34:41 2007
@@ -61,12 +61,12 @@
    * create multi-block files must also use reportWrittenBlock()
    * and addBlock().
    */
-  public LocatedBlock create( String src, 
-                              String clientName, 
-                              boolean overwrite, 
-                              short replication,
-                              long blockSize
-                              ) throws IOException;
+  public LocatedBlock create(String src, 
+                             String clientName, 
+                             boolean overwrite, 
+                             short replication,
+                             long blockSize
+                             ) throws IOException;
 
   /**
    * Set replication for an existing file.
@@ -83,9 +83,9 @@
    *         false if file does not exist or is a directory
    * @author shv
    */
-  public boolean setReplication( String src, 
-                                 short replication
-                                 ) throws IOException;
+  public boolean setReplication(String src, 
+                                short replication
+                                ) throws IOException;
 
   /**
    * If the client has not yet called reportWrittenBlock(), it can
@@ -261,7 +261,7 @@
    * <p>
    * Safe mode is entered automatically at name node startup.
    * Safe mode can also be entered manually using
-   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode( SafeModeAction.SAFEMODE_GET )}.
+   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode(SafeModeAction.SAFEMODE_GET)}.
    * <p>
    * At startup the name node accepts data node reports collecting
    * information about block locations.
@@ -277,11 +277,11 @@
    * Then the name node leaves safe mode.
    * <p>
    * If safe mode is turned on manually using
-   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode( SafeModeAction.SAFEMODE_ENTER )}
+   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode(SafeModeAction.SAFEMODE_ENTER)}
    * then the name node stays in safe mode until it is manually turned off
-   * using {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode( SafeModeAction.SAFEMODE_LEAVE )}.
+   * using {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode(SafeModeAction.SAFEMODE_LEAVE)}.
    * Current state of the name node can be verified using
-   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode( SafeModeAction.SAFEMODE_GET )}
+   * {@link #setSafeMode(FSConstants.SafeModeAction) setSafeMode(SafeModeAction.SAFEMODE_GET)}
    * <h4>Configuration parameters:</h4>
    * <tt>dfs.safemode.threshold.pct</tt> is the threshold parameter.<br>
    * <tt>dfs.safemode.extension</tt> is the safe mode extension parameter.<br>
@@ -304,7 +304,7 @@
    * @throws IOException
    * @author Konstantin Shvachko
    */
-  public boolean setSafeMode( FSConstants.SafeModeAction action ) throws IOException;
+  public boolean setSafeMode(FSConstants.SafeModeAction action) throws IOException;
 
   /**
    * Tells the namenode to reread the hosts and exclude files. 
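
The setSafeMode() javadoc cleaned up above describes the three manual actions. A sketch of how a caller would use them; the helper names are made up, obtaining the ClientProtocol handle (an RPC proxy to the name node) is elided, and the class sits in org.apache.hadoop.dfs in case the interface is package-private:

    package org.apache.hadoop.dfs;

    import java.io.IOException;

    // Hypothetical helper methods for the three SafeModeAction values.
    class SafeModeSketch {
      static boolean isInSafeMode(ClientProtocol namenode) throws IOException {
        // SAFEMODE_GET only reports the current state.
        return namenode.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_GET);
      }

      static void leaveSafeMode(ClientProtocol namenode) throws IOException {
        // SAFEMODE_LEAVE switches manual safe mode off; SAFEMODE_ENTER turns it on.
        namenode.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_LEAVE);
      }
    }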


