hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r529410 [25/27] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/examples/org/apache/hadoop/abacus/examples/ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/...
Date Mon, 16 Apr 2007 21:44:46 GMT
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java Mon Apr 16 14:44:35 2007
@@ -33,11 +33,11 @@
     System.getProperty("test.build.data",".") + "/test.array";
 
   public TestArrayFile(String name) { 
-      super(name); 
+    super(name); 
   }
 
   public void testArrayFile() throws Exception {
-      Configuration conf = new Configuration();
+    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     RandomDatum[] data = generate(10000);
     writeTest(fs, data, FILE);
@@ -116,40 +116,40 @@
     Path fpath = null;
     FileSystem fs = null;
     try {
-        for (; i < args.length; i++) {       // parse command line
-            if (args[i] == null) {
-                continue;
-            } else if (args[i].equals("-count")) {
-                count = Integer.parseInt(args[++i]);
-            } else if (args[i].equals("-nocreate")) {
-                create = false;
-            } else if (args[i].equals("-nocheck")) {
-                check = false;
-            } else {                                       
-                // file is required parameter
-                file = args[i];
-                fpath=new Path(file);
-            }
+      for (; i < args.length; i++) {       // parse command line
+        if (args[i] == null) {
+          continue;
+        } else if (args[i].equals("-count")) {
+          count = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-nocreate")) {
+          create = false;
+        } else if (args[i].equals("-nocheck")) {
+          check = false;
+        } else {                                       
+          // file is required parameter
+          file = args[i];
+          fpath=new Path(file);
         }
+      }
         
-        fs = fpath.getFileSystem(conf);
+      fs = fpath.getFileSystem(conf);
         
-        LOG.info("count = " + count);
-        LOG.info("create = " + create);
-        LOG.info("check = " + check);
-        LOG.info("file = " + file);
+      LOG.info("count = " + count);
+      LOG.info("create = " + create);
+      LOG.info("check = " + check);
+      LOG.info("file = " + file);
 
-        RandomDatum[] data = generate(count);
+      RandomDatum[] data = generate(count);
 
-        if (create) {
-            writeTest(fs, data, file);
-        }
+      if (create) {
+        writeTest(fs, data, file);
+      }
 
-        if (check) {
-            readTest(fs, data, file, conf);
-        }
+      if (check) {
+        readTest(fs, data, file, conf);
+      }
     } finally {
-        fs.close();
+      fs.close();
     }
   }
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java Mon Apr 16 14:44:35 2007
@@ -61,10 +61,10 @@
   
   public void testCompare() throws Exception {
     byte[][] values = new byte[][]{"abc".getBytes(), 
-        "ad".getBytes(),
-        "abcd".getBytes(),
-        "".getBytes(),
-        "b".getBytes()};
+                                   "ad".getBytes(),
+                                   "abcd".getBytes(),
+                                   "".getBytes(),
+                                   "b".getBytes()};
     BytesWritable[] buf = new BytesWritable[values.length];
     for(int i=0; i < values.length; ++i) {
       buf[i] = new BytesWritable(values[i]);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Mon Apr 16 14:44:35 2007
@@ -54,7 +54,7 @@
       CompressionCodec lzoCodec = null;
       try {
         lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(
-                conf.getClassByName(LzoCodec.class.getName()), conf);
+                                                                  conf.getClassByName(LzoCodec.class.getName()), conf);
       } catch (ClassNotFoundException cnfe) {
         throw new IOException("Cannot find LzoCodec!");
       }
@@ -79,76 +79,76 @@
 
     FileSystem fs = FileSystem.getLocal(conf);
     try {
-        // SequenceFile.Writer
-        writeTest(fs, count, seed, file, CompressionType.NONE, null);
-        readTest(fs, count, seed, file);
-
-        sortTest(fs, count, megabytes, factor, false, file);
-        checkSort(fs, count, seed, file);
+      // SequenceFile.Writer
+      writeTest(fs, count, seed, file, CompressionType.NONE, null);
+      readTest(fs, count, seed, file);
 
-        sortTest(fs, count, megabytes, factor, true, file);
-        checkSort(fs, count, seed, file);
+      sortTest(fs, count, megabytes, factor, false, file);
+      checkSort(fs, count, seed, file);
 
-        mergeTest(fs, count, seed, file, CompressionType.NONE, false, 
-            factor, megabytes);
-        checkSort(fs, count, seed, file);
+      sortTest(fs, count, megabytes, factor, true, file);
+      checkSort(fs, count, seed, file);
 
-        mergeTest(fs, count, seed, file, CompressionType.NONE, true, 
-            factor, megabytes);
-        checkSort(fs, count, seed, file);
+      mergeTest(fs, count, seed, file, CompressionType.NONE, false, 
+                factor, megabytes);
+      checkSort(fs, count, seed, file);
+
+      mergeTest(fs, count, seed, file, CompressionType.NONE, true, 
+                factor, megabytes);
+      checkSort(fs, count, seed, file);
         
-        // SequenceFile.RecordCompressWriter
-        writeTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD, 
-            codec);
-        readTest(fs, count, seed, recordCompressedFile);
-
-        sortTest(fs, count, megabytes, factor, false, recordCompressedFile);
-        checkSort(fs, count, seed, recordCompressedFile);
-
-        sortTest(fs, count, megabytes, factor, true, recordCompressedFile);
-        checkSort(fs, count, seed, recordCompressedFile);
-
-        mergeTest(fs, count, seed, recordCompressedFile, 
-            CompressionType.RECORD, false, factor, megabytes);
-        checkSort(fs, count, seed, recordCompressedFile);
-
-        mergeTest(fs, count, seed, recordCompressedFile, 
-            CompressionType.RECORD, true, factor, megabytes);
-        checkSort(fs, count, seed, recordCompressedFile);
+      // SequenceFile.RecordCompressWriter
+      writeTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD, 
+                codec);
+      readTest(fs, count, seed, recordCompressedFile);
+
+      sortTest(fs, count, megabytes, factor, false, recordCompressedFile);
+      checkSort(fs, count, seed, recordCompressedFile);
+
+      sortTest(fs, count, megabytes, factor, true, recordCompressedFile);
+      checkSort(fs, count, seed, recordCompressedFile);
+
+      mergeTest(fs, count, seed, recordCompressedFile, 
+                CompressionType.RECORD, false, factor, megabytes);
+      checkSort(fs, count, seed, recordCompressedFile);
+
+      mergeTest(fs, count, seed, recordCompressedFile, 
+                CompressionType.RECORD, true, factor, megabytes);
+      checkSort(fs, count, seed, recordCompressedFile);
         
-        // SequenceFile.BlockCompressWriter
-        writeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
-            codec);
-        readTest(fs, count, seed, blockCompressedFile);
-
-        sortTest(fs, count, megabytes, factor, false, blockCompressedFile);
-        checkSort(fs, count, seed, blockCompressedFile);
-
-        sortTest(fs, count, megabytes, factor, true, blockCompressedFile);
-        checkSort(fs, count, seed, blockCompressedFile);
-
-        mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK, 
-            false, factor, megabytes);
-        checkSort(fs, count, seed, blockCompressedFile);
-
-        mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK, 
-            true, factor, megabytes);
-        checkSort(fs, count, seed, blockCompressedFile);
+      // SequenceFile.BlockCompressWriter
+      writeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
+                codec);
+      readTest(fs, count, seed, blockCompressedFile);
+
+      sortTest(fs, count, megabytes, factor, false, blockCompressedFile);
+      checkSort(fs, count, seed, blockCompressedFile);
+
+      sortTest(fs, count, megabytes, factor, true, blockCompressedFile);
+      checkSort(fs, count, seed, blockCompressedFile);
+
+      mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK, 
+                false, factor, megabytes);
+      checkSort(fs, count, seed, blockCompressedFile);
+
+      mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK, 
+                true, factor, megabytes);
+      checkSort(fs, count, seed, blockCompressedFile);
 
-        } finally {
-        fs.close();
+    } finally {
+      fs.close();
     }
   }
 
   private static void writeTest(FileSystem fs, int count, int seed, Path file, 
-      CompressionType compressionType, CompressionCodec codec)
+                                CompressionType compressionType, CompressionCodec codec)
     throws IOException {
     fs.delete(file);
     LOG.info("creating " + count + " records with " + compressionType +
-              " compression");
+             " compression");
     SequenceFile.Writer writer = 
       SequenceFile.createWriter(fs, conf, file, 
-          RandomDatum.class, RandomDatum.class, compressionType, codec);
+                                RandomDatum.class, RandomDatum.class, compressionType, codec);
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
     for (int i = 0; i < count; i++) {
       generator.next();
@@ -278,7 +278,7 @@
       fs.delete(names[i]);
       fs.delete(sortedNames[i]);
       writers[i] = SequenceFile.createWriter(fs, conf, names[i], 
-          RandomDatum.class, RandomDatum.class, compressionType);
+                                             RandomDatum.class, RandomDatum.class, compressionType);
     }
 
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
@@ -350,7 +350,7 @@
       }
       // SequenceFile.RecordCompressWriter
       writeMetadataTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD, 
-          codec, theMetadata);
+                        codec, theMetadata);
       aMetadata = readMetadata(fs, recordCompressedFile);
       if (!theMetadata.equals(aMetadata)) {
         LOG.info("The original metadata:\n" + theMetadata.toString());
@@ -359,7 +359,7 @@
       }
       // SequenceFile.BlockCompressWriter
       writeMetadataTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
-          codec, theMetadata);
+                        codec, theMetadata);
       aMetadata =readMetadata(fs, blockCompressedFile);
       if (!theMetadata.equals(aMetadata)) {
         LOG.info("The original metadata:\n" + theMetadata.toString());
@@ -374,23 +374,23 @@
   
   
   private static SequenceFile.Metadata readMetadata(FileSystem fs, Path file)
-  throws IOException {
-  LOG.info("reading file: " + file.toString() + "\n");
-  SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
-  SequenceFile.Metadata meta = reader.getMetadata(); 
-  reader.close();
-  return meta;
+    throws IOException {
+    LOG.info("reading file: " + file.toString() + "\n");
+    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
+    SequenceFile.Metadata meta = reader.getMetadata(); 
+    reader.close();
+    return meta;
   }
 
   private static void writeMetadataTest(FileSystem fs, int count, int seed, Path file, 
-      CompressionType compressionType, CompressionCodec codec, SequenceFile.Metadata metadata)
+                                        CompressionType compressionType, CompressionCodec codec, SequenceFile.Metadata metadata)
     throws IOException {
     fs.delete(file);
     LOG.info("creating " + count + " records with metadata and with" + compressionType +
-              " compression");
+             " compression");
     SequenceFile.Writer writer = 
       SequenceFile.createWriter(fs, conf, file, 
-          RandomDatum.class, RandomDatum.class, compressionType, codec, null, metadata);
+                                RandomDatum.class, RandomDatum.class, compressionType, codec, null, metadata);
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
     for (int i = 0; i < count; i++) {
       generator.next();
@@ -418,94 +418,94 @@
     int seed = new Random().nextInt();
 
     String usage = "Usage: SequenceFile " +
-        "[-count N] " + 
-        "[-seed #] [-check] [-compressType <NONE|RECORD|BLOCK>] " + 
-        "-codec <compressionCodec> " + 
-        "[[-rwonly] | {[-megabytes M] [-factor F] [-nocreate] [-fast] [-merge]}] " +
-        " file";
+      "[-count N] " + 
+      "[-seed #] [-check] [-compressType <NONE|RECORD|BLOCK>] " + 
+      "-codec <compressionCodec> " + 
+      "[[-rwonly] | {[-megabytes M] [-factor F] [-nocreate] [-fast] [-merge]}] " +
+      " file";
     if (args.length == 0) {
-        System.err.println(usage);
-        System.exit(-1);
+      System.err.println(usage);
+      System.exit(-1);
     }
     
     FileSystem fs = null;
     try {
       for (int i=0; i < args.length; ++i) {       // parse command line
-          if (args[i] == null) {
-              continue;
-          } else if (args[i].equals("-count")) {
-              count = Integer.parseInt(args[++i]);
-          } else if (args[i].equals("-megabytes")) {
-              megabytes = Integer.parseInt(args[++i]);
-          } else if (args[i].equals("-factor")) {
-            factor = Integer.parseInt(args[++i]);
-          } else if (args[i].equals("-seed")) {
-            seed = Integer.parseInt(args[++i]);
-          } else if (args[i].equals("-rwonly")) {
-              rwonly = true;
-          } else if (args[i].equals("-nocreate")) {
-              create = false;
-          } else if (args[i].equals("-check")) {
-              check = true;
-          } else if (args[i].equals("-fast")) {
-              fast = true;
-          } else if (args[i].equals("-merge")) {
-              merge = true;
-          } else if (args[i].equals("-compressType")) {
-              compressType = args[++i];
-          } else if (args[i].equals("-codec")) {
-              compressionCodec = args[++i];
-          } else {
-              // file is required parameter
-              file = new Path(args[i]);
-          }
+        if (args[i] == null) {
+          continue;
+        } else if (args[i].equals("-count")) {
+          count = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-megabytes")) {
+          megabytes = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-factor")) {
+          factor = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-seed")) {
+          seed = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-rwonly")) {
+          rwonly = true;
+        } else if (args[i].equals("-nocreate")) {
+          create = false;
+        } else if (args[i].equals("-check")) {
+          check = true;
+        } else if (args[i].equals("-fast")) {
+          fast = true;
+        } else if (args[i].equals("-merge")) {
+          merge = true;
+        } else if (args[i].equals("-compressType")) {
+          compressType = args[++i];
+        } else if (args[i].equals("-codec")) {
+          compressionCodec = args[++i];
+        } else {
+          // file is required parameter
+          file = new Path(args[i]);
         }
+      }
         
-      	fs = file.getFileSystem(conf);
+      fs = file.getFileSystem(conf);
 
-        LOG.info("count = " + count);
-        LOG.info("megabytes = " + megabytes);
-        LOG.info("factor = " + factor);
-        LOG.info("create = " + create);
-        LOG.info("seed = " + seed);
-        LOG.info("rwonly = " + rwonly);
-        LOG.info("check = " + check);
-        LOG.info("fast = " + fast);
-        LOG.info("merge = " + merge);
-        LOG.info("compressType = " + compressType);
-        LOG.info("compressionCodec = " + compressionCodec);
-        LOG.info("file = " + file);
-
-        if (rwonly && (!create || merge || fast)) {
-          System.err.println(usage);
-          System.exit(-1);
-        }
+      LOG.info("count = " + count);
+      LOG.info("megabytes = " + megabytes);
+      LOG.info("factor = " + factor);
+      LOG.info("create = " + create);
+      LOG.info("seed = " + seed);
+      LOG.info("rwonly = " + rwonly);
+      LOG.info("check = " + check);
+      LOG.info("fast = " + fast);
+      LOG.info("merge = " + merge);
+      LOG.info("compressType = " + compressType);
+      LOG.info("compressionCodec = " + compressionCodec);
+      LOG.info("file = " + file);
 
-        CompressionType compressionType = 
-          CompressionType.valueOf(compressType);
-        CompressionCodec codec = (CompressionCodec)ReflectionUtils.newInstance(
-                                    conf.getClassByName(compressionCodec), 
-                                    conf);
-
-        if (rwonly || (create && !merge)) {
-            writeTest(fs, count, seed, file, compressionType, codec);
-            readTest(fs, count, seed, file);
-        }
+      if (rwonly && (!create || merge || fast)) {
+        System.err.println(usage);
+        System.exit(-1);
+      }
 
-        if (!rwonly) {
-          if (merge) {
-            mergeTest(fs, count, seed, file, compressionType, 
-                fast, factor, megabytes);
-          } else {
-            sortTest(fs, count, megabytes, factor, fast, file);
-          }
+      CompressionType compressionType = 
+        CompressionType.valueOf(compressType);
+      CompressionCodec codec = (CompressionCodec)ReflectionUtils.newInstance(
+                                                                             conf.getClassByName(compressionCodec), 
+                                                                             conf);
+
+      if (rwonly || (create && !merge)) {
+        writeTest(fs, count, seed, file, compressionType, codec);
+        readTest(fs, count, seed, file);
+      }
+
+      if (!rwonly) {
+        if (merge) {
+          mergeTest(fs, count, seed, file, compressionType, 
+                    fast, factor, megabytes);
+        } else {
+          sortTest(fs, count, megabytes, factor, fast, file);
         }
+      }
     
-        if (check) {
-            checkSort(fs, count, seed, file);
-        }
-      } finally {
-          fs.close();
+      if (check) {
+        checkSort(fs, count, seed, file);
       }
+    } finally {
+      fs.close();
+    }
   }
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java Mon Apr 16 14:44:35 2007
@@ -41,14 +41,14 @@
   public void testSetFile() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     try {
-        RandomDatum[] data = generate(10000);
-        writeTest(fs, data, FILE, CompressionType.NONE);
-        readTest(fs, data, FILE);
+      RandomDatum[] data = generate(10000);
+      writeTest(fs, data, FILE, CompressionType.NONE);
+      readTest(fs, data, FILE);
 
-        writeTest(fs, data, FILE, CompressionType.BLOCK);
-        readTest(fs, data, FILE);
+      writeTest(fs, data, FILE, CompressionType.BLOCK);
+      readTest(fs, data, FILE);
     } finally {
-        fs.close();
+      fs.close();
     }
   }
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestText.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestText.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestText.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestText.java Mon Apr 16 14:44:35 2007
@@ -42,17 +42,17 @@
     StringBuffer buffer = new StringBuffer();    
     int length = (len==RAND_LEN) ? RANDOM.nextInt(1000) : len;
     while (buffer.length()<length) {
-        int codePoint = RANDOM.nextInt(Character.MAX_CODE_POINT);
-        char tmpStr[] = new char[2];
-        if(Character.isDefined(codePoint)) {
-            //unpaired surrogate
-            if(codePoint < Character.MIN_SUPPLEMENTARY_CODE_POINT &&
-                    !Character.isHighSurrogate((char)codePoint) &&
-                    !Character.isLowSurrogate((char)codePoint) ) {
-               Character.toChars(codePoint, tmpStr, 0);
-               buffer.append(tmpStr);
-            }
+      int codePoint = RANDOM.nextInt(Character.MAX_CODE_POINT);
+      char tmpStr[] = new char[2];
+      if(Character.isDefined(codePoint)) {
+        //unpaired surrogate
+        if(codePoint < Character.MIN_SUPPLEMENTARY_CODE_POINT &&
+           !Character.isHighSurrogate((char)codePoint) &&
+           !Character.isLowSurrogate((char)codePoint) ) {
+          Character.toChars(codePoint, tmpStr, 0);
+          buffer.append(tmpStr);
         }
+      }
     }
     return buffer.toString();
   }
@@ -62,53 +62,53 @@
   }
   
   public static String getLongString() throws Exception {
-      String str = getTestString();
-      int length = Short.MAX_VALUE+str.length();
-      StringBuffer buffer = new StringBuffer();
-      while(buffer.length()<length)
-          buffer.append(str);
+    String str = getTestString();
+    int length = Short.MAX_VALUE+str.length();
+    StringBuffer buffer = new StringBuffer();
+    while(buffer.length()<length)
+      buffer.append(str);
       
-      return buffer.toString();
+    return buffer.toString();
   }
 
   public void testWritable() throws Exception {
     for (int i = 0; i < NUM_ITERATIONS; i++) {
-        String str;
-        if(i == 0 )
-            str = getLongString();
-        else
-            str = getTestString();
-        TestWritable.testWritable(new Text(str));
+      String str;
+      if(i == 0 )
+        str = getLongString();
+      else
+        str = getTestString();
+      TestWritable.testWritable(new Text(str));
     }
   }
 
 
   public void testCoding() throws Exception {
-      String before = "Bad \t encoding \t testcase";
-      Text text = new Text(before);
-      String after = text.toString();
-      assertTrue(before.equals(after));
+    String before = "Bad \t encoding \t testcase";
+    Text text = new Text(before);
+    String after = text.toString();
+    assertTrue(before.equals(after));
 
-      for (int i = 0; i < NUM_ITERATIONS; i++) {
-          // generate a random string
-          if(i == 0 )
-              before = getLongString();
-          else
-              before = getTestString();
+    for (int i = 0; i < NUM_ITERATIONS; i++) {
+      // generate a random string
+      if(i == 0 )
+        before = getLongString();
+      else
+        before = getTestString();
     
-          // test string to utf8
-          ByteBuffer bb = Text.encode(before);
+      // test string to utf8
+      ByteBuffer bb = Text.encode(before);
           
-          byte[] utf8Text = bb.array();
-          byte[] utf8Java = before.getBytes("UTF-8");
-          assertEquals(0, WritableComparator.compareBytes(
-                      utf8Text, 0, bb.limit(),
-                      utf8Java, 0, utf8Java.length));
+      byte[] utf8Text = bb.array();
+      byte[] utf8Java = before.getBytes("UTF-8");
+      assertEquals(0, WritableComparator.compareBytes(
+                                                      utf8Text, 0, bb.limit(),
+                                                      utf8Java, 0, utf8Java.length));
               
-          // test utf8 to string
-          after = Text.decode(utf8Java);
-          assertTrue(before.equals(after));
-      }
+      // test utf8 to string
+      after = Text.decode(utf8Java);
+      assertTrue(before.equals(after));
+    }
   }
   
   
@@ -117,90 +117,90 @@
     DataInputBuffer in = new DataInputBuffer();
 
     for (int i = 0; i < NUM_ITERATIONS; i++) {
-        // generate a random string
-        String before;          
-        if(i == 0 )
-            before = getLongString();
-        else
-            before = getTestString();
+      // generate a random string
+      String before;          
+      if(i == 0 )
+        before = getLongString();
+      else
+        before = getTestString();
         
-        // write it
-        out.reset();
-        Text.writeString(out, before);
+      // write it
+      out.reset();
+      Text.writeString(out, before);
         
-        // test that it reads correctly
-        in.reset(out.getData(), out.getLength());
-        String after = Text.readString(in);
-        assertTrue(before.equals(after));
+      // test that it reads correctly
+      in.reset(out.getData(), out.getLength());
+      String after = Text.readString(in);
+      assertTrue(before.equals(after));
         
-        // Test compatibility with Java's other decoder 
-        int strLenSize = WritableUtils.getVIntSize(Text.utf8Length(before));
-        String after2 = new String(out.getData(), strLenSize, 
-                out.getLength()-strLenSize, "UTF-8");
-        assertTrue(before.equals(after2));
-      }
+      // Test compatibility with Java's other decoder 
+      int strLenSize = WritableUtils.getVIntSize(Text.utf8Length(before));
+      String after2 = new String(out.getData(), strLenSize, 
+                                 out.getLength()-strLenSize, "UTF-8");
+      assertTrue(before.equals(after2));
+    }
   }
 
   public void testCompare() throws Exception {
-      DataOutputBuffer out1 = new DataOutputBuffer();
-      DataOutputBuffer out2 = new DataOutputBuffer();
-      DataOutputBuffer out3 = new DataOutputBuffer();
-      Text.Comparator comparator = new Text.Comparator();
-      for (int i=0; i<NUM_ITERATIONS; i++ ) {
-          // reset output buffer
-          out1.reset();
-          out2.reset();
-          out3.reset();
-
-          // generate two random strings
-          String str1 = getTestString();
-          String str2 = getTestString();
-          if(i == 0 ) {
-              str1 = getLongString();
-              str2 = getLongString();
-          } else {
-              str1 = getTestString();
-              str2 = getTestString();
-          }
-          
-          // convert to texts
-          Text txt1 = new Text(str1);
-          Text txt2 = new Text(str2);
-          Text txt3 = new Text(str1);
-          
-          // serialize them
-          txt1.write(out1);
-          txt2.write(out2);
-          txt3.write(out3);
-          
-          // compare two strings by looking at their binary formats
-          int ret1 = comparator.compare(out1.getData(), 0, out1.getLength(),
-                  out2.getData(), 0, out2.getLength());
-          // compare two strings
-          int ret2 = txt1.compareTo(txt2);
-          
-          assertEquals(ret1, ret2);
-          
-          // test equal
-          assertEquals(txt1.compareTo(txt3), 0);
-          assertEquals(comparator.compare(out1.getData(), 0, out3.getLength(),
-                  out3.getData(), 0, out3.getLength()), 0);
+    DataOutputBuffer out1 = new DataOutputBuffer();
+    DataOutputBuffer out2 = new DataOutputBuffer();
+    DataOutputBuffer out3 = new DataOutputBuffer();
+    Text.Comparator comparator = new Text.Comparator();
+    for (int i=0; i<NUM_ITERATIONS; i++ ) {
+      // reset output buffer
+      out1.reset();
+      out2.reset();
+      out3.reset();
+
+      // generate two random strings
+      String str1 = getTestString();
+      String str2 = getTestString();
+      if(i == 0 ) {
+        str1 = getLongString();
+        str2 = getLongString();
+      } else {
+        str1 = getTestString();
+        str2 = getTestString();
       }
+          
+      // convert to texts
+      Text txt1 = new Text(str1);
+      Text txt2 = new Text(str2);
+      Text txt3 = new Text(str1);
+          
+      // serialize them
+      txt1.write(out1);
+      txt2.write(out2);
+      txt3.write(out3);
+          
+      // compare two strings by looking at their binary formats
+      int ret1 = comparator.compare(out1.getData(), 0, out1.getLength(),
+                                    out2.getData(), 0, out2.getLength());
+      // compare two strings
+      int ret2 = txt1.compareTo(txt2);
+          
+      assertEquals(ret1, ret2);
+          
+      // test equal
+      assertEquals(txt1.compareTo(txt3), 0);
+      assertEquals(comparator.compare(out1.getData(), 0, out3.getLength(),
+                                      out3.getData(), 0, out3.getLength()), 0);
+    }
   }
       
   public void testFind() throws Exception {
-      Text text = new Text("abcd\u20acbdcd\u20ac");
-      assertTrue(text.find("abd")==-1);
-      assertTrue(text.find("ac")==-1);
-      assertTrue(text.find("\u20ac")==4);
-      assertTrue(text.find("\u20ac", 5)==11);
+    Text text = new Text("abcd\u20acbdcd\u20ac");
+    assertTrue(text.find("abd")==-1);
+    assertTrue(text.find("ac")==-1);
+    assertTrue(text.find("\u20ac")==4);
+    assertTrue(text.find("\u20ac", 5)==11);
   }
 
   public void testValidate() throws Exception {
-      Text text = new Text("abcd\u20acbdcd\u20ac");
-      byte [] utf8 = text.getBytes();
-      int length = text.getLength();
-      Text.validateUTF8(utf8, 0, length);
+    Text text = new Text("abcd\u20acbdcd\u20ac");
+    byte [] utf8 = text.getBytes();
+    int length = text.getLength();
+    Text.validateUTF8(utf8, 0, length);
   }
 
   public void testTextText() throws CharacterCodingException {

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestTextNonUTF8.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestTextNonUTF8.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestTextNonUTF8.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestTextNonUTF8.java Mon Apr 16 14:44:35 2007
@@ -31,21 +31,21 @@
   private static final Log LOG= LogFactory.getLog("org.apache.hadoop.io.TestTextNonUTF8");
 
   public void testNonUTF8() throws Exception{
-   // this is a non UTF8 byte array
-   byte b[] = {-0x01, -0x01, -0x01, -0x01, -0x01, -0x01, -0x01};
-   boolean nonUTF8 = false;
-   Text t = new Text(b);
-   try{
-     Text.validateUTF8(b);
-   }catch(MalformedInputException me){
-     nonUTF8 = false;
-   }
-   // asserting that the byte array is non utf8
-   assertFalse(nonUTF8);
-   byte ret[] = t.getBytes();
-   // asseting that the byte array are the same when the Text
-   // object is created.
-   assertTrue(Arrays.equals(b, ret));
+    // this is a non UTF8 byte array
+    byte b[] = {-0x01, -0x01, -0x01, -0x01, -0x01, -0x01, -0x01};
+    boolean nonUTF8 = false;
+    Text t = new Text(b);
+    try{
+      Text.validateUTF8(b);
+    }catch(MalformedInputException me){
+      nonUTF8 = false;
+    }
+    // asserting that the byte array is non utf8
+    assertFalse(nonUTF8);
+    byte ret[] = t.getBytes();
+    // asserting that the byte arrays are the same when the Text
+    // object is created.
+    assertTrue(Arrays.equals(b, ret));
   }
 
   public static void main(String[] args)  throws Exception

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java Mon Apr 16 14:44:35 2007
@@ -26,154 +26,154 @@
 
 public class TestVersionedWritable extends TestCase {
 
-	public TestVersionedWritable(String name) { super(name); }
+  public TestVersionedWritable(String name) { super(name); }
 	
 	
-	/** Example class used in test cases below. */
-	public static class SimpleVersionedWritable extends VersionedWritable {
+  /** Example class used in test cases below. */
+  public static class SimpleVersionedWritable extends VersionedWritable {
 		
-		private static final Random RANDOM = new Random();
-		int state = RANDOM.nextInt();
+    private static final Random RANDOM = new Random();
+    int state = RANDOM.nextInt();
 
 		
-		private static byte VERSION = 1;
-		public byte getVersion() { 
-			return VERSION; 
-		}		
+    private static byte VERSION = 1;
+    public byte getVersion() { 
+      return VERSION; 
+    }		
 		
 
-		public void write(DataOutput out) throws IOException {
-			super.write(out); // version.
-			out.writeInt(state);
-		}
+    public void write(DataOutput out) throws IOException {
+      super.write(out); // version.
+      out.writeInt(state);
+    }
 		
-		public void readFields(DataInput in) throws IOException {
-			super.readFields(in); // version
-			this.state = in.readInt();
-		}
+    public void readFields(DataInput in) throws IOException {
+      super.readFields(in); // version
+      this.state = in.readInt();
+    }
 		
 
-		public static SimpleVersionedWritable read(DataInput in) throws IOException {
-			SimpleVersionedWritable result = new SimpleVersionedWritable();
-			result.readFields(in);
-			return result;
-		}
+    public static SimpleVersionedWritable read(DataInput in) throws IOException {
+      SimpleVersionedWritable result = new SimpleVersionedWritable();
+      result.readFields(in);
+      return result;
+    }
 		
 
-		/** Required by test code, below. */
-		public boolean equals(Object o) {
-			if (!(o instanceof SimpleVersionedWritable))
-				return false;
-			SimpleVersionedWritable other = (SimpleVersionedWritable)o;
-			return this.state == other.state;
-		}
+    /** Required by test code, below. */
+    public boolean equals(Object o) {
+      if (!(o instanceof SimpleVersionedWritable))
+        return false;
+      SimpleVersionedWritable other = (SimpleVersionedWritable)o;
+      return this.state == other.state;
+    }
 
-	}
+  }
 
 
 	
-	public static class AdvancedVersionedWritable extends SimpleVersionedWritable {
+  public static class AdvancedVersionedWritable extends SimpleVersionedWritable {
 
-		String shortTestString = "Now is the time for all good men to come to the aid of the Party";
-		String longTestString = "Four score and twenty years ago. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah.";
+    String shortTestString = "Now is the time for all good men to come to the aid of the Party";
+    String longTestString = "Four score and twenty years ago. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah.";
 		
-		String compressableTestString = 
-			"Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " +
-			"Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " +
-			"Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " ;
+    String compressableTestString = 
+      "Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " +
+      "Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " +
+      "Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. Blah. " ;
 
-		SimpleVersionedWritable containedObject = new SimpleVersionedWritable();
-		String[] testStringArray = {"The", "Quick", "Brown", "Fox", "Jumped", "Over", "The", "Lazy", "Dog"};
+    SimpleVersionedWritable containedObject = new SimpleVersionedWritable();
+    String[] testStringArray = {"The", "Quick", "Brown", "Fox", "Jumped", "Over", "The", "Lazy", "Dog"};
 
-		public void write(DataOutput out) throws IOException {
-			super.write(out);
-			out.writeUTF(shortTestString); 
-			WritableUtils.writeString(out,longTestString); 
-			int comp = WritableUtils.writeCompressedString(out,compressableTestString); 
-			System.out.println("Compression is " + comp + "%");
-			containedObject.write(out); // Warning if this is a recursive call, you need a null value.
-			WritableUtils.writeStringArray(out,testStringArray); 
+    public void write(DataOutput out) throws IOException {
+      super.write(out);
+      out.writeUTF(shortTestString); 
+      WritableUtils.writeString(out,longTestString); 
+      int comp = WritableUtils.writeCompressedString(out,compressableTestString); 
+      System.out.println("Compression is " + comp + "%");
+      containedObject.write(out); // Warning if this is a recursive call, you need a null value.
+      WritableUtils.writeStringArray(out,testStringArray); 
 
-		}
+    }
 		
 		
-		public void readFields(DataInput in) throws IOException {
-			super.readFields(in);
-			shortTestString = in.readUTF();
-			longTestString = WritableUtils.readString(in); 
-			compressableTestString = WritableUtils.readCompressedString(in);
-			containedObject.readFields(in); // Warning if this is a recursive call, you need a null value.
-			testStringArray = WritableUtils.readStringArray(in); 
-		}
+    public void readFields(DataInput in) throws IOException {
+      super.readFields(in);
+      shortTestString = in.readUTF();
+      longTestString = WritableUtils.readString(in); 
+      compressableTestString = WritableUtils.readCompressedString(in);
+      containedObject.readFields(in); // Warning if this is a recursive call, you need a null value.
+      testStringArray = WritableUtils.readStringArray(in); 
+    }
 			
 
 
-		public boolean equals(Object o) {
-			super.equals(o);
+    public boolean equals(Object o) {
+      super.equals(o);
 
-			if (!shortTestString.equals(((AdvancedVersionedWritable)o).shortTestString)) { return false;}
-			if (!longTestString.equals(((AdvancedVersionedWritable)o).longTestString)) { return false;}
-			if (!compressableTestString.equals(((AdvancedVersionedWritable)o).compressableTestString)) { return false;}
+      if (!shortTestString.equals(((AdvancedVersionedWritable)o).shortTestString)) { return false;}
+      if (!longTestString.equals(((AdvancedVersionedWritable)o).longTestString)) { return false;}
+      if (!compressableTestString.equals(((AdvancedVersionedWritable)o).compressableTestString)) { return false;}
 			
-			if (testStringArray.length != ((AdvancedVersionedWritable)o).testStringArray.length) { return false;}
-			for(int i=0;i< testStringArray.length;i++){
-				if (!testStringArray[i].equals(((AdvancedVersionedWritable)o).testStringArray[i])) {
-					return false;
-				}
-			}
+      if (testStringArray.length != ((AdvancedVersionedWritable)o).testStringArray.length) { return false;}
+      for(int i=0;i< testStringArray.length;i++){
+        if (!testStringArray[i].equals(((AdvancedVersionedWritable)o).testStringArray[i])) {
+          return false;
+        }
+      }
 			
-			if (!containedObject.equals(((AdvancedVersionedWritable)o).containedObject)) { return false;}
+      if (!containedObject.equals(((AdvancedVersionedWritable)o).containedObject)) { return false;}
 			
-			return true;
-		}
+      return true;
+    }
 		
 
 
-	}
+  }
 
-	/* This one checks that version mismatch is thrown... */
-	public static class SimpleVersionedWritableV2 extends SimpleVersionedWritable {
-		static byte VERSION = 2;
-		public byte getVersion() { 
-			return VERSION; 
-		}		
-	}
+  /* This one checks that version mismatch is thrown... */
+  public static class SimpleVersionedWritableV2 extends SimpleVersionedWritable {
+    static byte VERSION = 2;
+    public byte getVersion() { 
+      return VERSION; 
+    }		
+  }
 
 
-	/** Test 1: Check that SimpleVersionedWritable. */
-	public void testSimpleVersionedWritable() throws Exception {
-		TestWritable.testWritable(new SimpleVersionedWritable());
-	}
+  /** Test 1: Check that SimpleVersionedWritable works. */
+  public void testSimpleVersionedWritable() throws Exception {
+    TestWritable.testWritable(new SimpleVersionedWritable());
+  }
 
-	/** Test 2: Check that AdvancedVersionedWritable Works (well, why wouldn't it!). */
-	public void testAdvancedVersionedWritable() throws Exception {
-		TestWritable.testWritable(new AdvancedVersionedWritable());
-	}
+  /** Test 2: Check that AdvancedVersionedWritable Works (well, why wouldn't it!). */
+  public void testAdvancedVersionedWritable() throws Exception {
+    TestWritable.testWritable(new AdvancedVersionedWritable());
+  }
 
-	/** Test 3: Check that SimpleVersionedWritable throws an Exception. */
-	public void testSimpleVersionedWritableMismatch() throws Exception {
-		TestVersionedWritable.testVersionedWritable(new SimpleVersionedWritable(), new SimpleVersionedWritableV2());
-	}
+  /** Test 3: Check that SimpleVersionedWritable throws an Exception. */
+  public void testSimpleVersionedWritableMismatch() throws Exception {
+    TestVersionedWritable.testVersionedWritable(new SimpleVersionedWritable(), new SimpleVersionedWritableV2());
+  }
 
 
 
 	
   /** Utility method for testing VersionedWritables. */
   public static void testVersionedWritable(Writable before, Writable after) throws Exception {
-      DataOutputBuffer dob = new DataOutputBuffer();
-      before.write(dob);
+    DataOutputBuffer dob = new DataOutputBuffer();
+    before.write(dob);
 	
-      DataInputBuffer dib = new DataInputBuffer();
-      dib.reset(dob.getData(), dob.getLength());
+    DataInputBuffer dib = new DataInputBuffer();
+    dib.reset(dob.getData(), dob.getLength());
 
-      try {
-          after.readFields(dib);
-      } catch (VersionMismatchException vmme) {
-          System.out.println("Good, we expected this:" + vmme);
-          return;
-      }
+    try {
+      after.readFields(dib);
+    } catch (VersionMismatchException vmme) {
+      System.out.println("Good, we expected this:" + vmme);
+      return;
+    }
 	
-      throw new Exception("A Version Mismatch Didn't Happen!");
+    throw new Exception("A Version Mismatch Didn't Happen!");
   }
 }
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java Mon Apr 16 14:44:35 2007
@@ -62,16 +62,16 @@
 
   /** Utility method for testing writables. */
   public static void testWritable(Writable before) throws Exception {
-      DataOutputBuffer dob = new DataOutputBuffer();
-      before.write(dob);
+    DataOutputBuffer dob = new DataOutputBuffer();
+    before.write(dob);
 
-      DataInputBuffer dib = new DataInputBuffer();
-      dib.reset(dob.getData(), dob.getLength());
+    DataInputBuffer dib = new DataInputBuffer();
+    dib.reset(dob.getData(), dob.getLength());
     
-      Writable after = (Writable)before.getClass().newInstance();
-      after.readFields(dib);
+    Writable after = (Writable)before.getClass().newInstance();
+    after.readFields(dib);
 
-      assertEquals(before, after);
+    assertEquals(before, after);
   }
 	
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java Mon Apr 16 14:44:35 2007
@@ -58,7 +58,7 @@
   }
   
   private static void codecTest(int seed, int count, String codecClass) 
-  throws IOException {
+    throws IOException {
     
     // Create the codec
     Configuration conf = new Configuration();
@@ -102,7 +102,7 @@
     // De-compress data
     DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
     deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, 
-        compressedDataBuffer.getLength());
+                                 compressedDataBuffer.getLength());
     CompressionInputStream inflateFilter = 
       codec.createInputStream(deCompressedDataBuffer);
     DataInputStream inflateIn = 
@@ -134,18 +134,18 @@
     }
 
     try {
-    for (int i=0; i < args.length; ++i) {       // parse command line
-      if (args[i] == null) {
-        continue;
-      } else if (args[i].equals("-count")) {
-        count = Integer.parseInt(args[++i]);
-      } else if (args[i].equals("-codec")) {
-        codecClass = args[++i];
+      for (int i=0; i < args.length; ++i) {       // parse command line
+        if (args[i] == null) {
+          continue;
+        } else if (args[i].equals("-count")) {
+          count = Integer.parseInt(args[++i]);
+        } else if (args[i].equals("-codec")) {
+          codecClass = args[++i];
+        }
       }
-    }
 
-    int seed = 0;
-    codecTest(seed, count, codecClass);
+      int seed = 0;
+      codecTest(seed, count, codecClass);
     } catch (Exception e) {
       System.err.println("Caught: " + e);
       e.printStackTrace();

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java Mon Apr 16 14:44:35 2007
@@ -49,7 +49,7 @@
     private boolean sleep;
 
     public TestServer(String bindAddress, int port, int handlerCount, boolean sleep) 
-    throws IOException {
+      throws IOException {
       super(bindAddress, port, LongWritable.class, handlerCount, conf);
       this.setTimeout(1000);
       this.sleep = sleep;
@@ -136,7 +136,7 @@
   }
 
   public void testSerial(int handlerCount, boolean handlerSleep, 
-                          int clientCount, int callerCount, int callCount)
+                         int clientCount, int callerCount, int callCount)
     throws Exception {
     Server server = new TestServer(ADDRESS, PORT, handlerCount, handlerSleep);
     server.start();

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java Mon Apr 16 14:44:35 2007
@@ -147,7 +147,7 @@
     Method echo =
       TestProtocol.class.getMethod("echo", new Class[] { String.class });
     String[] strings = (String[])RPC.call(echo, new String[][]{{"a"},{"b"}},
-                                         new InetSocketAddress[] {addr, addr}, conf);
+                                          new InetSocketAddress[] {addr, addr}, conf);
     assertTrue(Arrays.equals(strings, new String[]{"a","b"}));
 
     Method ping = TestProtocol.class.getMethod("ping", new Class[] {});

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRBench.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRBench.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRBench.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRBench.java Mon Apr 16 14:44:35 2007
@@ -56,7 +56,7 @@
    */
   public static class Map extends MapReduceBase implements Mapper {
     public void map(WritableComparable key, Writable value,
-        OutputCollector output, Reporter reporter) throws IOException 
+                    OutputCollector output, Reporter reporter) throws IOException 
     {
       String line = value.toString();
       output.collect(new UTF8(process(line)), new UTF8(""));		
@@ -71,7 +71,7 @@
    */
   public static class Reduce extends MapReduceBase implements Reducer {
     public void reduce(WritableComparable key, Iterator values,
-        OutputCollector output, Reporter reporter) throws IOException 
+                       OutputCollector output, Reporter reporter) throws IOException 
     {
       while(values.hasNext()) {
         output.collect(key, new UTF8(values.next().toString()));
@@ -88,7 +88,7 @@
    * generated data is one of ascending, descending, or random.
    */
   public static void generateTextFile(FileSystem fs, Path inputFile, 
-    long numLines, Order sortOrder) throws IOException 
+                                      long numLines, Order sortOrder) throws IOException 
   {
     LOG.info("creating control file: "+numLines+" numLines, "+sortOrder+" sortOrder");
     PrintStream output = null;
@@ -96,21 +96,21 @@
       output = new PrintStream(fs.create(inputFile));
       int padding = String.valueOf(numLines).length();
       switch(sortOrder) {
-        case RANDOM:
-          for (long l = 0; l < numLines; l++) {
-            output.println(pad((new Random()).nextLong(), padding));
-          }
-          break; 
-        case ASCENDING: 
-          for (long l = 0; l < numLines; l++) {
-            output.println(pad(l, padding));
-          }
-          break;
-        case DESCENDING: 
-          for (long l = numLines; l > 0; l--) {
-            output.println(pad(l, padding));
-          }
-          break;
+      case RANDOM:
+        for (long l = 0; l < numLines; l++) {
+          output.println(pad((new Random()).nextLong(), padding));
+        }
+        break; 
+      case ASCENDING: 
+        for (long l = 0; l < numLines; l++) {
+          output.println(pad(l, padding));
+        }
+        break;
+      case DESCENDING: 
+        for (long l = numLines; l > 0; l--) {
+          output.println(pad(l, padding));
+        }
+        break;
       }
     } finally {
       if (output != null)
@@ -180,8 +180,8 @@
       jobConf.setOutputPath(new Path(OUTPUT_DIR, "output_" + rand.nextInt()));
 
       LOG.info("Running job " + i + ":" +
-        " input=" + jobConf.getInputPaths()[0] + 
-        " output=" + jobConf.getOutputPath());
+               " input=" + jobConf.getInputPaths()[0] + 
+               " output=" + jobConf.getOutputPath());
       
       // run the mapred task now 
       long curTime = System.currentTimeMillis();
@@ -259,14 +259,14 @@
     }
     
     if (numRuns < 1 ||  // verify args
-      numMaps < 1 ||
-      numReduces < 1 ||
-      inputLines < 0 ||
-      inputSortOrder == null)
-    {
-      System.err.println(usage);
-      System.exit(-1);
-    }
+        numMaps < 1 ||
+        numReduces < 1 ||
+        inputLines < 0 ||
+        inputSortOrder == null)
+      {
+        System.err.println(usage);
+        System.exit(-1);
+      }
 
     JobConf jobConf = setupJob(numMaps, numReduces, jarFile);
     FileSystem fs = FileSystem.get(jobConf);
@@ -296,13 +296,13 @@
       totalTime += time.longValue(); 
       if (verbose) {
         System.out.println("Total milliseconds for task: " + (++i) + 
-            " = " +  time);
+                           " = " +  time);
       }
     }
     long avgTime = totalTime / numRuns;    
     System.out.println("DataLines\tMaps\tReduces\tAvgTime (milliseconds)");
     System.out.println(inputLines + "\t\t" + numMaps + "\t" + 
-        numReduces + "\t" + avgTime);
+                       numReduces + "\t" + avgTime);
   }
   
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java Mon Apr 16 14:44:35 2007
@@ -41,7 +41,7 @@
 
 public class MRCaching {
   static String testStr = "This is a test file " + "used for testing caching "
-      + "jars, zip and normal files.";
+    + "jars, zip and normal files.";
 
   /**
    * Using the wordcount example and adding caching to it. The cache
@@ -98,7 +98,7 @@
     }
 
     public void map(WritableComparable key, Writable value,
-        OutputCollector output, Reporter reporter) throws IOException {
+                    OutputCollector output, Reporter reporter) throws IOException {
       String line = ((Text) value).toString();
       StringTokenizer itr = new StringTokenizer(line);
       while (itr.hasMoreTokens()) {
@@ -115,7 +115,7 @@
   public static class ReduceClass extends MapReduceBase implements Reducer {
 
     public void reduce(WritableComparable key, Iterator values,
-        OutputCollector output, Reporter reporter) throws IOException {
+                       OutputCollector output, Reporter reporter) throws IOException {
       int sum = 0;
       while (values.hasNext()) {
         sum += ((IntWritable) values.next()).get();
@@ -125,8 +125,8 @@
   }
 
   public static boolean launchMRCache(String indir,
-      String outdir, String cacheDir, JobConf conf, String input)
-      throws IOException {
+                                      String outdir, String cacheDir, JobConf conf, String input)
+    throws IOException {
     String TEST_ROOT_DIR = new Path(System.getProperty("test.build.data","/tmp"))
       .toString().replace(' ', '+');
     //if (TEST_ROOT_DIR.startsWith("C:")) TEST_ROOT_DIR = "/tmp";
@@ -139,7 +139,7 @@
       throw new IOException("Mkdirs failed to create " + inDir.toString());
     }
     {
-System.out.println("HERE:"+inDir);
+      System.out.println("HERE:"+inDir);
       DataOutputStream file = fs.create(new Path(inDir, "part-0"));
       file.writeBytes(input);
       file.close();
@@ -204,7 +204,7 @@
     Path result = new Path(TEST_ROOT_DIR + "/test.txt");
     {
       BufferedReader file = new BufferedReader(new InputStreamReader(
-          FileSystem.getLocal(conf).open(result)));
+                                                                     FileSystem.getLocal(conf).open(result)));
       String line = file.readLine();
       while (line != null) {
         if (!testStr.equals(line))

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java Mon Apr 16 14:44:35 2007
@@ -27,326 +27,326 @@
  */
 public class MiniMRCluster {
     
-    private Thread jobTrackerThread;
-    private JobTrackerRunner jobTracker;
+  private Thread jobTrackerThread;
+  private JobTrackerRunner jobTracker;
     
-    private int jobTrackerPort = 0;
-    private int taskTrackerPort = 0;
-    private int jobTrackerInfoPort = 0;
-    private int numTaskTrackers;
+  private int jobTrackerPort = 0;
+  private int taskTrackerPort = 0;
+  private int jobTrackerInfoPort = 0;
+  private int numTaskTrackers;
     
-    private List<TaskTrackerRunner> taskTrackerList = new ArrayList<TaskTrackerRunner>();
-    private List<Thread> taskTrackerThreadList = new ArrayList<Thread>();
+  private List<TaskTrackerRunner> taskTrackerList = new ArrayList<TaskTrackerRunner>();
+  private List<Thread> taskTrackerThreadList = new ArrayList<Thread>();
     
-    private String namenode;
+  private String namenode;
     
-    /**
-     * An inner class that runs a job tracker.
-     */
-    class JobTrackerRunner implements Runnable {
+  /**
+   * An inner class that runs a job tracker.
+   */
+  class JobTrackerRunner implements Runnable {
 
-        JobConf jc = null;
+    JobConf jc = null;
         
-        public boolean isUp() {
-            return (JobTracker.getTracker() != null);
-        }
+    public boolean isUp() {
+      return (JobTracker.getTracker() != null);
+    }
         
-        public int getJobTrackerPort() {
-          return JobTracker.getAddress(jc).getPort();
-        }
+    public int getJobTrackerPort() {
+      return JobTracker.getAddress(jc).getPort();
+    }
 
-        public int getJobTrackerInfoPort() {
-          return jc.getInt("mapred.job.tracker.info.port", 50030);
-        }
+    public int getJobTrackerInfoPort() {
+      return jc.getInt("mapred.job.tracker.info.port", 50030);
+    }
         
-        /**
-         * Create the job tracker and run it.
-         */
-        public void run() {
-            try {
-                jc = createJobConf();
-                jc.set("mapred.local.dir","build/test/mapred/local");
-                JobTracker.startTracker(jc);
-            } catch (Throwable e) {
-                System.err.println("Job tracker crashed:");
-                e.printStackTrace();
-            }
-        }
+    /**
+     * Create the job tracker and run it.
+     */
+    public void run() {
+      try {
+        jc = createJobConf();
+        jc.set("mapred.local.dir","build/test/mapred/local");
+        JobTracker.startTracker(jc);
+      } catch (Throwable e) {
+        System.err.println("Job tracker crashed:");
+        e.printStackTrace();
+      }
+    }
         
-        /**
-         * Shutdown the job tracker and wait for it to finish.
-         */
-        public void shutdown() {
-            try {
-                JobTracker.stopTracker();
-            } catch (Throwable e) {
-                System.err.println("Unable to shut down job tracker:");
-                e.printStackTrace();
-            }
-        }
+    /**
+     * Shutdown the job tracker and wait for it to finish.
+     */
+    public void shutdown() {
+      try {
+        JobTracker.stopTracker();
+      } catch (Throwable e) {
+        System.err.println("Unable to shut down job tracker:");
+        e.printStackTrace();
+      }
     }
+  }
     
+  /**
+   * An inner class to run the task tracker.
+   */
+  class TaskTrackerRunner implements Runnable {
+    volatile TaskTracker tt;
+    int trackerId;
+    // the localDirs for this taskTracker
+    String[] localDir;
+    volatile boolean isInitialized = false;
+    volatile boolean isDead = false;
+    int numDir;       
+    TaskTrackerRunner(int trackerId, int numDir) {
+      this.trackerId = trackerId;
+      this.numDir = numDir;
+      // a maximum of 10 local dirs can be specified in MinMRCluster
+      localDir = new String[10];
+    }
+        
     /**
-     * An inner class to run the task tracker.
+     * Create and run the task tracker.
      */
-    class TaskTrackerRunner implements Runnable {
-        volatile TaskTracker tt;
-        int trackerId;
-        // the localDirs for this taskTracker
-        String[] localDir;
-        volatile boolean isInitialized = false;
-        volatile boolean isDead = false;
-        int numDir;       
-        TaskTrackerRunner(int trackerId, int numDir) {
-          this.trackerId = trackerId;
-          this.numDir = numDir;
-          // a maximum of 10 local dirs can be specified in MinMRCluster
-          localDir = new String[10];
-        }
-        
-        /**
-         * Create and run the task tracker.
-         */
-        public void run() {
-            try {
-                JobConf jc = createJobConf();
-                jc.setInt("mapred.task.tracker.info.port", 0);
-                jc.setInt("mapred.task.tracker.report.port", taskTrackerPort);
-                File localDir = new File(jc.get("mapred.local.dir"));
-                String mapredDir = "";
-                File ttDir = new File(localDir, Integer.toString(trackerId) + "_" + 0);
-                if (!ttDir.mkdirs()) {
-                  if (!ttDir.isDirectory()) {
-                    throw new IOException("Mkdirs failed to create " + ttDir.toString());
-                  }
-                }
-                this.localDir[0] = ttDir.getAbsolutePath();
-                mapredDir = ttDir.getAbsolutePath();
-                for (int i = 1; i < numDir; i++){
-                  ttDir = new File(localDir, Integer.toString(trackerId) + "_" + i);
-                  ttDir.mkdirs();
-                  if (!ttDir.mkdirs()) {
-                    if (!ttDir.isDirectory()) {
-                      throw new IOException("Mkdirs failed to create " + ttDir.toString());
-                    }
-                  }
-                  this.localDir[i] = ttDir.getAbsolutePath();
-                  mapredDir = mapredDir + "," + ttDir.getAbsolutePath();
-                }
-                jc.set("mapred.local.dir", mapredDir);
-                System.out.println("mapred.local.dir is " +  mapredDir);
-                tt = new TaskTracker(jc);
-                isInitialized = true;
-                tt.run();
-            } catch (Throwable e) {
-                isDead = true;
-                tt = null;
-                System.err.println("Task tracker crashed:");
-                e.printStackTrace();
-            }
-        }
-        
-        /**
-         * Get the local dir for this TaskTracker.
-         * This is there so that we do not break
-         * previous tests. 
-         * @return the absolute pathname
-         */
-        public String getLocalDir() {
-          return localDir[0];
+    public void run() {
+      try {
+        JobConf jc = createJobConf();
+        jc.setInt("mapred.task.tracker.info.port", 0);
+        jc.setInt("mapred.task.tracker.report.port", taskTrackerPort);
+        File localDir = new File(jc.get("mapred.local.dir"));
+        String mapredDir = "";
+        File ttDir = new File(localDir, Integer.toString(trackerId) + "_" + 0);
+        if (!ttDir.mkdirs()) {
+          if (!ttDir.isDirectory()) {
+            throw new IOException("Mkdirs failed to create " + ttDir.toString());
+          }
         }
-       
-        public String[] getLocalDirs(){
-         return localDir;
-        } 
-        /**
-         * Shut down the server and wait for it to finish.
-         */
-        public void shutdown() {
-            if (tt != null) {
-                try {
-                    tt.shutdown();
-                } catch (Throwable e) {
-                    System.err.println("Unable to shut down task tracker:");
-                    e.printStackTrace();
-                }
+        this.localDir[0] = ttDir.getAbsolutePath();
+        mapredDir = ttDir.getAbsolutePath();
+        for (int i = 1; i < numDir; i++){
+          ttDir = new File(localDir, Integer.toString(trackerId) + "_" + i);
+          ttDir.mkdirs();
+          if (!ttDir.mkdirs()) {
+            if (!ttDir.isDirectory()) {
+              throw new IOException("Mkdirs failed to create " + ttDir.toString());
             }
+          }
+          this.localDir[i] = ttDir.getAbsolutePath();
+          mapredDir = mapredDir + "," + ttDir.getAbsolutePath();
         }
+        jc.set("mapred.local.dir", mapredDir);
+        System.out.println("mapred.local.dir is " +  mapredDir);
+        tt = new TaskTracker(jc);
+        isInitialized = true;
+        tt.run();
+      } catch (Throwable e) {
+        isDead = true;
+        tt = null;
+        System.err.println("Task tracker crashed:");
+        e.printStackTrace();
+      }
     }
-    
+        
     /**
-     * Get the local directory for the Nth task tracker
-     * @param taskTracker the index of the task tracker to check
-     * @return the absolute pathname of the local dir
+     * Get the local dir for this TaskTracker.
+     * This is there so that we do not break
+     * previous tests. 
+     * @return the absolute pathname
      */
-    public String getTaskTrackerLocalDir(int taskTracker) {
-      return ((TaskTrackerRunner) 
-              taskTrackerList.get(taskTracker)).getLocalDir();
+    public String getLocalDir() {
+      return localDir[0];
     }
-
+       
+    public String[] getLocalDirs(){
+      return localDir;
+    } 
     /**
-     * Get the number of task trackers in the cluster
+     * Shut down the server and wait for it to finish.
      */
-    public int getNumTaskTrackers() {
-      return taskTrackerList.size();
+    public void shutdown() {
+      if (tt != null) {
+        try {
+          tt.shutdown();
+        } catch (Throwable e) {
+          System.err.println("Unable to shut down task tracker:");
+          e.printStackTrace();
+        }
+      }
     }
+  }
     
-    /**
-     * Wait until the system is idle.
-     */
-    public void waitUntilIdle() {
-      for(Iterator itr= taskTrackerList.iterator(); itr.hasNext(); ) {
-        TaskTrackerRunner runner = (TaskTrackerRunner) itr.next();
-        while (!runner.isDead && (!runner.isInitialized || !runner.tt.isIdle())) {
-          if (!runner.isInitialized) {
-            System.out.println("Waiting for task tracker to start.");
-          } else {
-            System.out.println("Waiting for task tracker " + runner.tt.getName() +
-                               " to be idle.");
-          }
-          try {
-            Thread.sleep(1000);
-          } catch (InterruptedException ie) {}
+  /**
+   * Get the local directory for the Nth task tracker
+   * @param taskTracker the index of the task tracker to check
+   * @return the absolute pathname of the local dir
+   */
+  public String getTaskTrackerLocalDir(int taskTracker) {
+    return ((TaskTrackerRunner) 
+            taskTrackerList.get(taskTracker)).getLocalDir();
+  }
+
+  /**
+   * Get the number of task trackers in the cluster
+   */
+  public int getNumTaskTrackers() {
+    return taskTrackerList.size();
+  }
+    
+  /**
+   * Wait until the system is idle.
+   */
+  public void waitUntilIdle() {
+    for(Iterator itr= taskTrackerList.iterator(); itr.hasNext(); ) {
+      TaskTrackerRunner runner = (TaskTrackerRunner) itr.next();
+      while (!runner.isDead && (!runner.isInitialized || !runner.tt.isIdle())) {
+        if (!runner.isInitialized) {
+          System.out.println("Waiting for task tracker to start.");
+        } else {
+          System.out.println("Waiting for task tracker " + runner.tt.getName() +
+                             " to be idle.");
         }
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException ie) {}
       }
     }
+  }
 
-    /** 
-     * Get the actual rpc port used.
-     */
-    public int getJobTrackerPort() {
-        return jobTrackerPort;
-    }
-
-    public JobConf createJobConf() {
-      JobConf result = new JobConf();
-      result.set("fs.default.name", namenode);
-      result.set("mapred.job.tracker", "localhost:"+jobTrackerPort);
-      result.set("mapred.job.tracker.info.port", jobTrackerInfoPort);
-      // for debugging have all task output sent to the test output
-      JobClient.setTaskOutputFilter(result, JobClient.TaskStatusFilter.ALL);
-      return result;
-    }
-
-    /**
-     * Create the config and the cluster.
-     * @param numTaskTrackers no. of tasktrackers in the cluster
-     * @param namenode the namenode
-     * @param numDir no. of directories
-     * @throws IOException
-     */
-    public MiniMRCluster(int numTaskTrackers, String namenode, int numDir) 
+  /** 
+   * Get the actual rpc port used.
+   */
+  public int getJobTrackerPort() {
+    return jobTrackerPort;
+  }
+
+  public JobConf createJobConf() {
+    JobConf result = new JobConf();
+    result.set("fs.default.name", namenode);
+    result.set("mapred.job.tracker", "localhost:"+jobTrackerPort);
+    result.set("mapred.job.tracker.info.port", jobTrackerInfoPort);
+    // for debugging have all task output sent to the test output
+    JobClient.setTaskOutputFilter(result, JobClient.TaskStatusFilter.ALL);
+    return result;
+  }
+
+  /**
+   * Create the config and the cluster.
+   * @param numTaskTrackers no. of tasktrackers in the cluster
+   * @param namenode the namenode
+   * @param numDir no. of directories
+   * @throws IOException
+   */
+  public MiniMRCluster(int numTaskTrackers, String namenode, int numDir) 
     throws IOException {
-      this(0, 0, numTaskTrackers, namenode, false, numDir);
-    }
+    this(0, 0, numTaskTrackers, namenode, false, numDir);
+  }
     
-    /**
-     * Create the config and start up the servers.  The ports supplied by the user are
-     * just used as suggestions.  If those ports are already in use, new ports
-     * are tried.  The caller should call getJobTrackerPort to get the actual rpc port used.
-     * @deprecated use {@link #MiniMRCluster(int, String, int)}
-     */
-    public MiniMRCluster(int jobTrackerPort,
-                         int taskTrackerPort,
-                         int numTaskTrackers,
-                         String namenode,
-                         boolean taskTrackerFirst) throws IOException {
-        this(jobTrackerPort, taskTrackerPort, numTaskTrackers, namenode, 
-             taskTrackerFirst, 1);
-    } 
-
-    public MiniMRCluster(int jobTrackerPort,
-            int taskTrackerPort,
-            int numTaskTrackers,
-            String namenode,
-            boolean taskTrackerFirst, int numDir) throws IOException {
-
-        this.jobTrackerPort = jobTrackerPort;
-        this.taskTrackerPort = taskTrackerPort;
-        this.jobTrackerInfoPort = 0;
-        this.numTaskTrackers = numTaskTrackers;
-        this.namenode = namenode;
-
-        // Create the JobTracker
-        jobTracker = new JobTrackerRunner();
-        jobTrackerThread = new Thread(jobTracker);
+  /**
+   * Create the config and start up the servers.  The ports supplied by the user are
+   * just used as suggestions.  If those ports are already in use, new ports
+   * are tried.  The caller should call getJobTrackerPort to get the actual rpc port used.
+   * @deprecated use {@link #MiniMRCluster(int, String, int)}
+   */
+  public MiniMRCluster(int jobTrackerPort,
+                       int taskTrackerPort,
+                       int numTaskTrackers,
+                       String namenode,
+                       boolean taskTrackerFirst) throws IOException {
+    this(jobTrackerPort, taskTrackerPort, numTaskTrackers, namenode, 
+         taskTrackerFirst, 1);
+  } 
+
+  public MiniMRCluster(int jobTrackerPort,
+                       int taskTrackerPort,
+                       int numTaskTrackers,
+                       String namenode,
+                       boolean taskTrackerFirst, int numDir) throws IOException {
+
+    this.jobTrackerPort = jobTrackerPort;
+    this.taskTrackerPort = taskTrackerPort;
+    this.jobTrackerInfoPort = 0;
+    this.numTaskTrackers = numTaskTrackers;
+    this.namenode = namenode;
+
+    // Create the JobTracker
+    jobTracker = new JobTrackerRunner();
+    jobTrackerThread = new Thread(jobTracker);
         
-        // Create the TaskTrackers
-        for (int idx = 0; idx < numTaskTrackers; idx++) {
-          TaskTrackerRunner taskTracker = new TaskTrackerRunner(idx, numDir);
-          Thread taskTrackerThread = new Thread(taskTracker);
-          taskTrackerList.add(taskTracker);
-          taskTrackerThreadList.add(taskTrackerThread);
-        }
+    // Create the TaskTrackers
+    for (int idx = 0; idx < numTaskTrackers; idx++) {
+      TaskTrackerRunner taskTracker = new TaskTrackerRunner(idx, numDir);
+      Thread taskTrackerThread = new Thread(taskTracker);
+      taskTrackerList.add(taskTracker);
+      taskTrackerThreadList.add(taskTrackerThread);
+    }
 
-        // Start the MiniMRCluster
+    // Start the MiniMRCluster
         
-        if (taskTrackerFirst) {
-          for (Thread taskTrackerThread : taskTrackerThreadList){
-            taskTrackerThread.start();
-          }
-        }
+    if (taskTrackerFirst) {
+      for (Thread taskTrackerThread : taskTrackerThreadList){
+        taskTrackerThread.start();
+      }
+    }
         
-        jobTrackerThread.start();
-        while (!jobTracker.isUp()) {
-          try {                                     // let daemons get started
-            System.err.println("Waiting for JobTracker to start...");
-            Thread.sleep(1000);
-          } catch(InterruptedException e) {
-          }
-        }
+    jobTrackerThread.start();
+    while (!jobTracker.isUp()) {
+      try {                                     // let daemons get started
+        System.err.println("Waiting for JobTracker to start...");
+        Thread.sleep(1000);
+      } catch(InterruptedException e) {
+      }
+    }
         
-        // Set the configuration for the task-trackers
-        this.jobTrackerPort = jobTracker.getJobTrackerPort();
-        this.jobTrackerInfoPort = jobTracker.getJobTrackerInfoPort();
-
-        if (!taskTrackerFirst) {
-          for (Thread taskTrackerThread : taskTrackerThreadList){
-            taskTrackerThread.start();
-          }
-        }
-
-        // Wait till the MR cluster stabilizes
-        waitUntilIdle();
+    // Set the configuration for the task-trackers
+    this.jobTrackerPort = jobTracker.getJobTrackerPort();
+    this.jobTrackerInfoPort = jobTracker.getJobTrackerInfoPort();
+
+    if (!taskTrackerFirst) {
+      for (Thread taskTrackerThread : taskTrackerThreadList){
+        taskTrackerThread.start();
+      }
     }
+
+    // Wait till the MR cluster stabilizes
+    waitUntilIdle();
+  }
     
-    /**
-     * Shut down the servers.
-     */
-    public void shutdown() {
-      try {
-        waitUntilIdle();
-        for (int idx = 0; idx < numTaskTrackers; idx++) {
-            TaskTrackerRunner taskTracker = (TaskTrackerRunner) taskTrackerList.get(idx);
-            Thread taskTrackerThread = (Thread) taskTrackerThreadList.get(idx);
-            taskTracker.shutdown();
-            taskTrackerThread.interrupt();
-            try {
-                taskTrackerThread.join();
-            } catch (InterruptedException ex) {
-                ex.printStackTrace();
-            }
-        }
-        jobTracker.shutdown();
-        jobTrackerThread.interrupt();
+  /**
+   * Shut down the servers.
+   */
+  public void shutdown() {
+    try {
+      waitUntilIdle();
+      for (int idx = 0; idx < numTaskTrackers; idx++) {
+        TaskTrackerRunner taskTracker = (TaskTrackerRunner) taskTrackerList.get(idx);
+        Thread taskTrackerThread = (Thread) taskTrackerThreadList.get(idx);
+        taskTracker.shutdown();
+        taskTrackerThread.interrupt();
         try {
-            jobTrackerThread.join();
+          taskTrackerThread.join();
         } catch (InterruptedException ex) {
-            ex.printStackTrace();
-        }
-        } finally {
-            File configDir = new File("build", "minimr");
-            File siteFile = new File(configDir, "hadoop-site.xml");
-            siteFile.delete();
+          ex.printStackTrace();
         }
+      }
+      jobTracker.shutdown();
+      jobTrackerThread.interrupt();
+      try {
+        jobTrackerThread.join();
+      } catch (InterruptedException ex) {
+        ex.printStackTrace();
+      }
+    } finally {
+      File configDir = new File("build", "minimr");
+      File siteFile = new File(configDir, "hadoop-site.xml");
+      siteFile.delete();
     }
+  }
     
-    public static void main(String[] args) throws IOException {
-        System.out.println("Bringing up Jobtracker and tasktrackers.");
-        MiniMRCluster mr = new MiniMRCluster(4, "local", 1);
-        System.out.println("JobTracker and TaskTrackers are up.");
-        mr.shutdown();
-        System.out.println("JobTracker and TaskTrackers brought down.");
-    }
+  public static void main(String[] args) throws IOException {
+    System.out.println("Bringing up Jobtracker and tasktrackers.");
+    MiniMRCluster mr = new MiniMRCluster(4, "local", 1);
+    System.out.println("JobTracker and TaskTrackers are up.");
+    mr.shutdown();
+    System.out.println("JobTracker and TaskTrackers brought down.");
+  }
 }
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/PiEstimator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/PiEstimator.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/PiEstimator.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/PiEstimator.java Mon Apr 16 14:44:35 2007
@@ -52,23 +52,23 @@
      * @param reporter
      */
     public void map(WritableComparable key,
-        Writable val,
-        OutputCollector out,
-        Reporter reporter) throws IOException {
-        int nSamples = ((IntWritable) key).get();
-        for(int idx = 0; idx < nSamples; idx++) {
-            double x = r.nextDouble();
-            double y = r.nextDouble();
-            double d = (x-0.5)*(x-0.5)+(y-0.5)*(y-0.5);
-            if (d > 0.25) {
-                out.collect(new IntWritable(0), new IntWritable(1));
-            } else {
-                out.collect(new IntWritable(1), new IntWritable(1));
-            }
-            if (idx%100 == 1) {
-                reporter.setStatus("Generated "+idx+" samples.");
-            }
+                    Writable val,
+                    OutputCollector out,
+                    Reporter reporter) throws IOException {
+      int nSamples = ((IntWritable) key).get();
+      for(int idx = 0; idx < nSamples; idx++) {
+        double x = r.nextDouble();
+        double y = r.nextDouble();
+        double d = (x-0.5)*(x-0.5)+(y-0.5)*(y-0.5);
+        if (d > 0.25) {
+          out.collect(new IntWritable(0), new IntWritable(1));
+        } else {
+          out.collect(new IntWritable(1), new IntWritable(1));
         }
+        if (idx%100 == 1) {
+          reporter.setStatus("Generated "+idx+" samples.");
+        }
+      }
     }
     
     public void close() {
@@ -77,50 +77,50 @@
   }
   
   public static class PiReducer extends MapReduceBase implements Reducer {
-      int numInside = 0;
-      int numOutside = 0;
-      JobConf conf;
+    int numInside = 0;
+    int numOutside = 0;
+    JobConf conf;
       
-      /** Reducer configuration.
-       *
-       */
-      public void configure(JobConf job) {
-          conf = job;
-      }
-      /** Reduce method.
-       * @ param key
-       * @param values
-       * @param output
-       * @param reporter
-       */
-      public void reduce(WritableComparable key,
-              Iterator values,
-              OutputCollector output,
-              Reporter reporter) throws IOException {
-          if (((IntWritable)key).get() == 1) {
-              while (values.hasNext()) {
-                  int num = ((IntWritable)values.next()).get();
-                  numInside += num;
-              }
-          } else {
-              while (values.hasNext()) {
-                  int num = ((IntWritable)values.next()).get();
-                  numOutside += num;
-              }
-          }
+    /** Reducer configuration.
+     *
+     */
+    public void configure(JobConf job) {
+      conf = job;
+    }
+    /** Reduce method.
+     * @param key
+     * @param values
+     * @param output
+     * @param reporter
+     */
+    public void reduce(WritableComparable key,
+                       Iterator values,
+                       OutputCollector output,
+                       Reporter reporter) throws IOException {
+      if (((IntWritable)key).get() == 1) {
+        while (values.hasNext()) {
+          int num = ((IntWritable)values.next()).get();
+          numInside += num;
+        }
+      } else {
+        while (values.hasNext()) {
+          int num = ((IntWritable)values.next()).get();
+          numOutside += num;
+        }
       }
+    }
       
-      public void close() throws IOException {
-        Path tmpDir = new Path("test-mini-mr");
-        Path outDir = new Path(tmpDir, "out");
-        Path outFile = new Path(outDir, "reduce-out");
-        FileSystem fileSys = FileSystem.get(conf);
-        SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, conf,
-            outFile, IntWritable.class, IntWritable.class, 
-            CompressionType.NONE);
-        writer.append(new IntWritable(numInside), new IntWritable(numOutside));
-        writer.close();
-      }
+    public void close() throws IOException {
+      Path tmpDir = new Path("test-mini-mr");
+      Path outDir = new Path(tmpDir, "out");
+      Path outFile = new Path(outDir, "reduce-out");
+      FileSystem fileSys = FileSystem.get(conf);
+      SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, conf,
+                                                             outFile, IntWritable.class, IntWritable.class, 
+                                                             CompressionType.NONE);
+      writer.append(new IntWritable(numInside), new IntWritable(numOutside));
+      writer.close();
+    }
   }
 
   /**
@@ -128,7 +128,7 @@
    * monte-carlo method.
    */
   static double launch(int numMaps, int numPoints, JobConf jobConf)
-  throws IOException {
+    throws IOException {
 
     jobConf.setJarByClass(PiEstimator.class);
     jobConf.setJobName("test-mini-mr");
@@ -163,7 +163,7 @@
     for(int idx=0; idx < numMaps; ++idx) {
       Path file = new Path(inDir, "part"+idx);
       SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, jobConf, 
-          file, IntWritable.class, IntWritable.class, CompressionType.NONE);
+                                                             file, IntWritable.class, IntWritable.class, CompressionType.NONE);
       writer.append(new IntWritable(numPoints), new IntWritable(0));
       writer.close();
     }
@@ -174,7 +174,7 @@
       JobClient.runJob(jobConf);
       Path inFile = new Path(outDir, "reduce-out");
       SequenceFile.Reader reader = new SequenceFile.Reader(fileSys, inFile,
-              jobConf);
+                                                           jobConf);
       IntWritable numInside = new IntWritable();
       IntWritable numOutside = new IntWritable();
       reader.next(numInside, numOutside);
@@ -188,18 +188,18 @@
   }
   
   /**
-     * Launches all the tasks in order.
-     */
-    public static void main(String[] argv) throws Exception {
-        if (argv.length < 2) {
-            System.err.println("Usage: TestMiniMR <nMaps> <nSamples>");
-            return;
-        }
+   * Launches all the tasks in order.
+   */
+  public static void main(String[] argv) throws Exception {
+    if (argv.length < 2) {
+      System.err.println("Usage: TestMiniMR <nMaps> <nSamples>");
+      return;
+    }
 
-        int nMaps = Integer.parseInt(argv[0]);
-        int nSamples = Integer.parseInt(argv[1]);
+    int nMaps = Integer.parseInt(argv[0]);
+    int nSamples = Integer.parseInt(argv[1]);
         
-        System.out.println("Estimated value of PI is "+
-                           launch(nMaps, nSamples, new JobConf()));
-    }
+    System.out.println("Estimated value of PI is "+
+                       launch(nMaps, nSamples, new JobConf()));
+  }
 }



Mime
View raw message