mahout-commits mailing list archives

From: sro...@apache.org
Subject: svn commit: r986405 [3/6] - in /mahout/trunk: buildtools/ buildtools/src/main/resources/ core/src/main/java/org/apache/mahout/cf/taste/eval/ core/src/main/java/org/apache/mahout/cf/taste/hadoop/ core/src/main/java/org/apache/mahout/cf/taste/hadoop/item...
Date: Tue, 17 Aug 2010 17:34:19 GMT
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/hadoop/similarity/item/ItemSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/hadoop/similarity/item/ItemSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/hadoop/similarity/item/ItemSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/hadoop/similarity/item/ItemSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -57,7 +57,6 @@ public final class ItemSimilarityTest ex
 
   /**
    * tests {@link CountUsersMapper}
-   * @throws Exception
    */
   public void testCountUsersMapper() throws Exception {
     Mapper<LongWritable,Text,CountUsersKeyWritable,VarLongWritable>.Context context =
@@ -75,9 +74,6 @@ public final class ItemSimilarityTest ex
 
   /**
    * applies an {@link IArgumentMatcher} to a {@link CountUsersKeyWritable} checking whether it matches the userID
-   *
-   * @param userID
-   * @return
    */
   static CountUsersKeyWritable keyForUserID(final long userID) {
     EasyMock.reportMatcher(new IArgumentMatcher() {
@@ -99,8 +95,6 @@ public final class ItemSimilarityTest ex
 
   /**
    * tests {@link CountUsersReducer}
-   *
-   * @throws Exception
    */
   public void testCountUsersReducer() throws Exception {
     Reducer<CountUsersKeyWritable,VarLongWritable,VarIntWritable,NullWritable>.Context context =
@@ -126,9 +120,9 @@ public final class ItemSimilarityTest ex
       EasyMock.createMock(Mapper.Context.class);
     context.write(EasyMock.eq(new VarIntWritable(TasteHadoopUtils.idToIndex(100L))),
         MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(100L),
-        TasteHadoopUtils.idToIndex(12L), 1.3d));
+        TasteHadoopUtils.idToIndex(12L), 1.3));
     context.write(EasyMock.eq(new VarIntWritable(TasteHadoopUtils.idToIndex(20L))),
-        MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(20L), TasteHadoopUtils.idToIndex(35L), 3.0d));
+        MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(20L), TasteHadoopUtils.idToIndex(35L), 3.0));
     EasyMock.replay(context);
 
     PrefsToItemUserMatrixMapper mapper = new PrefsToItemUserMatrixMapper();
@@ -143,9 +137,9 @@ public final class ItemSimilarityTest ex
       EasyMock.createMock(Mapper.Context.class);
     context.write(EasyMock.eq(new VarIntWritable(TasteHadoopUtils.idToIndex(100L))),
         MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(100L),
-        TasteHadoopUtils.idToIndex(12L), 1d));
+        TasteHadoopUtils.idToIndex(12L), 1.0));
     context.write(EasyMock.eq(new VarIntWritable(TasteHadoopUtils.idToIndex(20L))),
-        MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(20L), TasteHadoopUtils.idToIndex(35L), 1d));
+        MathHelper.matrixEntryMatches(TasteHadoopUtils.idToIndex(20L), TasteHadoopUtils.idToIndex(35L), 1.0));
     EasyMock.replay(context);
 
     PrefsToItemUserMatrixMapper mapper = new PrefsToItemUserMatrixMapper();
@@ -158,19 +152,17 @@ public final class ItemSimilarityTest ex
 
   /**
    * tests {@link PrefsToItemUserMatrixReducer}
-   *
-   * @throws Exception
    */
   public void testPrefsToItemUserMatrixReducer() throws Exception {
     Reducer<VarIntWritable, MatrixEntryWritable,IntWritable,VectorWritable>.Context context =
       EasyMock.createMock(Reducer.Context.class);
 
-    context.write(EasyMock.eq(new IntWritable(123)), MathHelper.vectorMatches(MathHelper.elem(1, 0.5d),
-        MathHelper.elem(7, 2.0d)));
+    context.write(EasyMock.eq(new IntWritable(123)), MathHelper.vectorMatches(MathHelper.elem(1, 0.5),
+        MathHelper.elem(7, 2.0)));
     EasyMock.replay(context);
 
-    List<MatrixEntryWritable> entries = Arrays.asList(MathHelper.matrixEntry(123, 1, 0.5d),
-        MathHelper.matrixEntry(123, 7, 2.0d));
+    List<MatrixEntryWritable> entries = Arrays.asList(MathHelper.matrixEntry(123, 1, 0.5),
+        MathHelper.matrixEntry(123, 7, 2.0));
 
     new PrefsToItemUserMatrixReducer().reduce(new VarIntWritable(123), entries, context);
 
@@ -179,8 +171,6 @@ public final class ItemSimilarityTest ex
 
   /**
    * tests {@link MostSimilarItemPairsMapper}
-   *
-   * @throws Exception
    */
   public void testMostSimilarItemsPairsMapper() throws Exception {
 
@@ -192,14 +182,14 @@ public final class ItemSimilarityTest ex
     Mapper<IntWritable,VectorWritable,EntityEntityWritable,DoubleWritable>.Context context =
       EasyMock.createMock(Mapper.Context.class);
 
-    context.write(new EntityEntityWritable(34L, 56L), new DoubleWritable(0.9d));
+    context.write(new EntityEntityWritable(34L, 56L), new DoubleWritable(0.9));
 
     EasyMock.replay(context);
 
     Vector vector = new RandomAccessSparseVector(Integer.MAX_VALUE);
-    vector.set(12, 0.2d);
-    vector.set(34, 1.0d);
-    vector.set(56, 0.9d);
+    vector.set(12, 0.2);
+    vector.set(34, 1.0);
+    vector.set(56, 0.9);
 
     MostSimilarItemPairsMapper mapper = new MostSimilarItemPairsMapper();
     setField(mapper, "indexItemIDMap", indexItemIDMap);
@@ -212,19 +202,17 @@ public final class ItemSimilarityTest ex
 
   /**
    * tests {@link MostSimilarItemPairsReducer}
-   *
-   * @throws Exception
    */
   public void testMostSimilarItemPairsReducer() throws Exception {
     Reducer<EntityEntityWritable,DoubleWritable,EntityEntityWritable,DoubleWritable>.Context context =
       EasyMock.createMock(Reducer.Context.class);
 
-    context.write(new EntityEntityWritable(123L, 456L), new DoubleWritable(0.5d));
+    context.write(new EntityEntityWritable(123L, 456L), new DoubleWritable(0.5));
 
     EasyMock.replay(context);
 
     new MostSimilarItemPairsReducer().reduce(new EntityEntityWritable(123L, 456L),
-        Arrays.asList(new DoubleWritable(0.5d), new DoubleWritable(0.5d)), context);
+        Arrays.asList(new DoubleWritable(0.5), new DoubleWritable(0.5)), context);
 
     EasyMock.verify(context);
   }
@@ -240,8 +228,6 @@ public final class ItemSimilarityTest ex
    * Paul    1       -      1      -
    * Fred    -       -      -      1
    * </pre>
-   *
-   * @throws Exception
    */
   public void testCompleteJob() throws Exception {
 
@@ -336,8 +322,6 @@ public final class ItemSimilarityTest ex
    *    i2 --> i1
    *    i3 --> i1
    * </pre>
-   *
-   * @throws Exception
    */
   public void testMaxSimilaritiesPerItem() throws Exception {
 
@@ -382,7 +366,7 @@ public final class ItemSimilarityTest ex
 
     String line;
     int currentLine = 1;
-    while ( (line = reader.readLine()) != null) {
+    while ((line = reader.readLine()) != null) {
 
       String[] tokens = line.split("\t");
 
@@ -393,19 +377,19 @@ public final class ItemSimilarityTest ex
       if (currentLine == 1) {
         assertEquals(1L, itemAID);
         assertEquals(2L, itemBID);
-        assertEquals(0.5d, similarity, 0.0001d);
+        assertEquals(0.5, similarity, 0.0001);
       }
 
       if (currentLine == 2) {
         assertEquals(1L, itemAID);
         assertEquals(3L, itemBID);
-        assertEquals(0.4, similarity, 0.0001d);
+        assertEquals(0.4, similarity, 0.0001);
       }
 
       currentLine++;
     }
 
-    int linesWritten = currentLine-1;
+    int linesWritten = currentLine - 1;
     assertEquals(2, linesWritten);
   }
 

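The keyForUserID() helper shown above follows EasyMock's custom argument-matcher recipe: register an IArgumentMatcher through EasyMock.reportMatcher() and return a placeholder of the expected argument type. A minimal self-contained sketch of that recipe (the getUserID() accessor on CountUsersKeyWritable is an assumption, not shown in this diff):

    import org.easymock.EasyMock;
    import org.easymock.IArgumentMatcher;

    static CountUsersKeyWritable keyForUserID(final long userID) {
      EasyMock.reportMatcher(new IArgumentMatcher() {
        @Override
        public boolean matches(Object argument) {
          // accept only keys that carry the expected user ID (accessor name assumed)
          return argument instanceof CountUsersKeyWritable
              && ((CountUsersKeyWritable) argument).getUserID() == userID;
        }
        @Override
        public void appendTo(StringBuffer buffer) {
          buffer.append("CountUsersKeyWritable with userID ").append(userID);
        }
      });
      return null; // the return value is ignored; EasyMock records the reported matcher
    }
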
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/impl/similarity/file/FileItemSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/impl/similarity/file/FileItemSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/impl/similarity/file/FileItemSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/cf/taste/impl/similarity/file/FileItemSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -49,18 +49,18 @@ public class FileItemSimilarityTest exte
   public void testLoadFromFile() throws Exception {
     ItemSimilarity similarity = new FileItemSimilarity(testFile);
 
-    assertEquals(0.125d, similarity.itemSimilarity(1L, 5L));
-    assertEquals(0.125d, similarity.itemSimilarity(5L, 1L));
-    assertEquals(0.5d, similarity.itemSimilarity(1L, 7L));
-    assertEquals(0.5d, similarity.itemSimilarity(7L, 1L));
+    assertEquals(0.125, similarity.itemSimilarity(1L, 5L));
+    assertEquals(0.125, similarity.itemSimilarity(5L, 1L));
+    assertEquals(0.5, similarity.itemSimilarity(1L, 7L));
+    assertEquals(0.5, similarity.itemSimilarity(7L, 1L));
 
     assertTrue(Double.isNaN(similarity.itemSimilarity(7L, 8L)));
 
     double[] valuesForOne = similarity.itemSimilarities(1L, new long[] { 5L, 7L });
     assertNotNull(valuesForOne);
     assertEquals(2, valuesForOne.length);
-    assertEquals(0.125d, valuesForOne[0]);
-    assertEquals(0.5d, valuesForOne[1]);
+    assertEquals(0.125, valuesForOne[0]);
+    assertEquals(0.5, valuesForOne[1]);
   }
 
   public void testNoRefreshAfterFileUpdate() throws Exception {
@@ -75,8 +75,8 @@ public class FileItemSimilarityTest exte
     writeLines(testFile, changedData);
 
     /* we shouldn't see any changes in the data as we have not yet refreshed */
-    assertEquals(0.5d, similarity.itemSimilarity(1L, 7L));
-    assertEquals(0.5d, similarity.itemSimilarity(7L, 1L));
+    assertEquals(0.5, similarity.itemSimilarity(1L, 7L));
+    assertEquals(0.5, similarity.itemSimilarity(7L, 1L));
     assertTrue(Double.isNaN(similarity.itemSimilarity(7L, 8L)));
   }
 
@@ -94,14 +94,14 @@ public class FileItemSimilarityTest exte
     similarity.refresh(null);
 
     /* we should now see the changes in the data */
-    assertEquals(0.9d, similarity.itemSimilarity(1L, 7L));
-    assertEquals(0.9d, similarity.itemSimilarity(7L, 1L));
-    assertEquals(0.125d, similarity.itemSimilarity(1L, 5L));
-    assertEquals(0.125d, similarity.itemSimilarity(5L, 1L));
+    assertEquals(0.9, similarity.itemSimilarity(1L, 7L));
+    assertEquals(0.9, similarity.itemSimilarity(7L, 1L));
+    assertEquals(0.125, similarity.itemSimilarity(1L, 5L));
+    assertEquals(0.125, similarity.itemSimilarity(5L, 1L));
 
     assertFalse(Double.isNaN(similarity.itemSimilarity(7L, 8L)));
-    assertEquals(0.112d, similarity.itemSimilarity(7L, 8L));
-    assertEquals(0.112d, similarity.itemSimilarity(8L, 7L));
+    assertEquals(0.112, similarity.itemSimilarity(7L, 8L));
+    assertEquals(0.112, similarity.itemSimilarity(8L, 7L));
   }
 
   public void testFileNotFoundExceptionForNonExistingFile() throws Exception {
@@ -115,18 +115,18 @@ public class FileItemSimilarityTest exte
     Iterable<ItemItemSimilarity> similarityIterable = new FileItemSimilarity.FileItemItemSimilarityIterable(testFile);
     GenericItemSimilarity similarity = new GenericItemSimilarity(similarityIterable);
 
-    assertEquals(0.125d, similarity.itemSimilarity(1L, 5L));
-    assertEquals(0.125d, similarity.itemSimilarity(5L, 1L));
-    assertEquals(0.5d, similarity.itemSimilarity(1L, 7L));
-    assertEquals(0.5d, similarity.itemSimilarity(7L, 1L));
+    assertEquals(0.125, similarity.itemSimilarity(1L, 5L));
+    assertEquals(0.125, similarity.itemSimilarity(5L, 1L));
+    assertEquals(0.5, similarity.itemSimilarity(1L, 7L));
+    assertEquals(0.5, similarity.itemSimilarity(7L, 1L));
 
     assertTrue(Double.isNaN(similarity.itemSimilarity(7L, 8L)));
 
     double[] valuesForOne = similarity.itemSimilarities(1L, new long[] { 5L, 7L });
     assertNotNull(valuesForOne);
     assertEquals(2, valuesForOne.length);
-    assertEquals(0.125d, valuesForOne[0]);
-    assertEquals(0.5d, valuesForOne[1]);
+    assertEquals(0.125, valuesForOne[0]);
+    assertEquals(0.5, valuesForOne[1]);
   }
 
   public void testToString() throws Exception {

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/classifier/sgd/OnlineLogisticRegressionTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/classifier/sgd/OnlineLogisticRegressionTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/classifier/sgd/OnlineLogisticRegressionTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/classifier/sgd/OnlineLogisticRegressionTest.java Tue Aug 17 17:34:14 2010
@@ -190,10 +190,10 @@ public class OnlineLogisticRegressionTes
     // now test the accuracy
     Matrix tmp = lr.classify(input);
     // mean(abs(tmp - target))
-    double meanAbsoluteError = tmp.getColumn(0).minus(target).aggregate(Functions.plus, Functions.abs) / 60;
+    double meanAbsoluteError = tmp.getColumn(0).minus(target).aggregate(Functions.PLUS, Functions.ABS) / 60;
 
     // max(abs(tmp - target)
-    double maxAbsoluteError = tmp.getColumn(0).minus(target).aggregate(Functions.max, Functions.abs);
+    double maxAbsoluteError = tmp.getColumn(0).minus(target).aggregate(Functions.MAX, Functions.ABS);
 
     System.out.printf("mAE = %.4f, maxAE = %.4f\n", meanAbsoluteError, maxAbsoluteError);
     assertEquals(0, meanAbsoluteError , 0.05);

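The two aggregate(combiner, mapper) calls above compute simple error norms over a Mahout Vector; as a standalone sketch of the same idiom (variable names here are illustrative):

    // mean and maximum absolute difference between a prediction column and a target vector
    Vector diff = predictions.minus(target);
    double meanAbsoluteError = diff.aggregate(Functions.PLUS, Functions.ABS) / diff.size();
    double maxAbsoluteError = diff.aggregate(Functions.MAX, Functions.ABS);
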
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/common/AbstractJobTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/common/AbstractJobTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/common/AbstractJobTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/common/AbstractJobTest.java Tue Aug 17 17:34:14 2010
@@ -22,31 +22,28 @@ package org.apache.mahout.common;
 import java.util.HashMap;
 import java.util.Map;
 
+import junit.framework.Assert;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.mahout.common.commandline.DefaultOptionCreator;
 import org.junit.Test;
 
-import junit.framework.TestCase;
-
-/**
- * 
- */
 public class AbstractJobTest {
   
   interface AbstractJobFactory {
-    public AbstractJob getJob();
+    AbstractJob getJob();
   }
   
   @Test
   public void testFlag() throws Exception {
     final Map<String,String> testMap = new HashMap<String,String>();
     
-    final AbstractJobFactory fact = new AbstractJobFactory() {
+    AbstractJobFactory fact = new AbstractJobFactory() {
+      @Override
       public AbstractJob getJob() {
         return new AbstractJob() {
           @Override
-          public int run(String[] args) throws Exception {
+          public int run(String[] args) {
             addFlag("testFlag", "t", "a simple test flag");
             
             Map<String,String> argMap = parseArguments(args);
@@ -60,14 +57,13 @@ public class AbstractJobTest {
     
     // testFlag will only be present if specified on the command-line
     
-    String[] noFlag   = new String[0];
-    ToolRunner.run(fact.getJob(), noFlag);
-    TestCase.assertFalse("test map for absent flag",
+    ToolRunner.run(fact.getJob(), new String[0]);
+    Assert.assertFalse("test map for absent flag",
         testMap.containsKey("--testFlag"));
     
     String[] withFlag = { "--testFlag" };
     ToolRunner.run(fact.getJob(), withFlag);
-    TestCase.assertTrue("test map for present flag",
+    Assert.assertTrue("test map for present flag",
         testMap.containsKey("--testFlag"));
   }
   
@@ -75,11 +71,12 @@ public class AbstractJobTest {
   public void testOptions() throws Exception {
     final Map<String,String> testMap = new HashMap<String,String>();
     
-    final AbstractJobFactory fact = new AbstractJobFactory() {
+    AbstractJobFactory fact = new AbstractJobFactory() {
+      @Override
       public AbstractJob getJob() {
         return new AbstractJob() {
           @Override
-          public int run(String[] args) throws Exception {
+          public int run(String[] args) {
             this.addOption(DefaultOptionCreator.overwriteOption().create());
             this.addOption("option", "o", "option");
             this.addOption("required", "r", "required", true /* required */);
@@ -100,33 +97,31 @@ public class AbstractJobTest {
         };
       }
     };
-    
-    int ret;
-    
-    ret = ToolRunner.run(fact.getJob(), new String[0]);
-    TestCase.assertEquals("-1 for missing required options", -1, ret);
+
+    int ret = ToolRunner.run(fact.getJob(), new String[0]);
+    Assert.assertEquals("-1 for missing required options", -1, ret);
     
     ret = ToolRunner.run(fact.getJob(), new String[]{
       "--required", "requiredArg"
     });
-    TestCase.assertEquals("0 for no missing required options", 0, ret);
-    TestCase.assertEquals("requiredArg", testMap.get("--required"));
-    TestCase.assertEquals("defaultValue", testMap.get("--hasDefault"));
-    TestCase.assertNull(testMap.get("--option"));
-    TestCase.assertNull(testMap.get("--notRequired"));
-    TestCase.assertFalse(testMap.containsKey("--overwrite"));
+    Assert.assertEquals("0 for no missing required options", 0, ret);
+    Assert.assertEquals("requiredArg", testMap.get("--required"));
+    Assert.assertEquals("defaultValue", testMap.get("--hasDefault"));
+    Assert.assertNull(testMap.get("--option"));
+    Assert.assertNull(testMap.get("--notRequired"));
+    Assert.assertFalse(testMap.containsKey("--overwrite"));
     
     ret = ToolRunner.run(fact.getJob(), new String[]{
       "--required", "requiredArg",
       "--unknownArg"
     });
-    TestCase.assertEquals("-1 for including unknown options", -1, ret);
+    Assert.assertEquals("-1 for including unknown options", -1, ret);
 
     ret = ToolRunner.run(fact.getJob(), new String[]{
       "--required", "requiredArg",
       "--required", "requiredArg2",
     });
-    TestCase.assertEquals("-1 for including duplicate options", -1, ret);
+    Assert.assertEquals("-1 for including duplicate options", -1, ret);
     
     ret = ToolRunner.run(fact.getJob(), new String[]{
       "--required", "requiredArg", 
@@ -135,12 +130,12 @@ public class AbstractJobTest {
       "--option", "optionValue",
       "--notRequired", "notRequired"
     });
-    TestCase.assertEquals("0 for no missing required options", 0, ret);
-    TestCase.assertEquals("requiredArg", testMap.get("--required"));
-    TestCase.assertEquals("nonDefault", testMap.get("--hasDefault"));
-    TestCase.assertEquals("optionValue", testMap.get("--option"));
-    TestCase.assertEquals("notRequired", testMap.get("--notRequired"));
-    TestCase.assertTrue(testMap.containsKey("--overwrite"));
+    Assert.assertEquals("0 for no missing required options", 0, ret);
+    Assert.assertEquals("requiredArg", testMap.get("--required"));
+    Assert.assertEquals("nonDefault", testMap.get("--hasDefault"));
+    Assert.assertEquals("optionValue", testMap.get("--option"));
+    Assert.assertEquals("notRequired", testMap.get("--notRequired"));
+    Assert.assertTrue(testMap.containsKey("--overwrite"));
     
     ret = ToolRunner.run(fact.getJob(), new String[]{
       "-r", "requiredArg", 
@@ -149,23 +144,24 @@ public class AbstractJobTest {
       "-o", "optionValue",
       "-nr", "notRequired"
     });
-    TestCase.assertEquals("0 for no missing required options", 0, ret);
-    TestCase.assertEquals("requiredArg", testMap.get("--required"));
-    TestCase.assertEquals("nonDefault", testMap.get("--hasDefault"));
-    TestCase.assertEquals("optionValue", testMap.get("--option"));
-    TestCase.assertEquals("notRequired", testMap.get("--notRequired"));
-    TestCase.assertTrue(testMap.containsKey("--overwrite"));
+    Assert.assertEquals("0 for no missing required options", 0, ret);
+    Assert.assertEquals("requiredArg", testMap.get("--required"));
+    Assert.assertEquals("nonDefault", testMap.get("--hasDefault"));
+    Assert.assertEquals("optionValue", testMap.get("--option"));
+    Assert.assertEquals("notRequired", testMap.get("--notRequired"));
+    Assert.assertTrue(testMap.containsKey("--overwrite"));
     
   }
   
   @Test
   public void testInputOutputPaths() throws Exception {
     
-    final AbstractJobFactory fact = new AbstractJobFactory() {
+    AbstractJobFactory fact = new AbstractJobFactory() {
+      @Override
       public AbstractJob getJob() {
         return new AbstractJob() {
           @Override
-          public int run(String[] args) throws Exception {
+          public int run(String[] args) {
             addInputOption();
             addOutputOption();
             
@@ -177,67 +173,64 @@ public class AbstractJobTest {
             }
             
             Path inputPath = getInputPath();
-            TestCase.assertNotNull("getInputPath() returns non-null", inputPath);
+            Assert.assertNotNull("getInputPath() returns non-null", inputPath);
             
             Path outputPath = getOutputPath();
-            TestCase.assertNotNull("getOutputPath() returns non-null", outputPath);
+            Assert.assertNotNull("getOutputPath() returns non-null", outputPath);
             return 0;
           }
         };
       }
     };
+
+    int ret = ToolRunner.run(fact.getJob(), new String[0]);
+    Assert.assertEquals("-1 for missing input option", -1, ret);
     
-    AbstractJob job;
-    int ret;
-    
-    ret = ToolRunner.run(fact.getJob(), new String[0]);
-    TestCase.assertEquals("-1 for missing input option", -1, ret);
-    
-    final String testInputPath = "testInputPath";
-    final String testOutputPath = "testOutputPath";
-    final String testInputPropertyPath = "testInputPropertyPath";
-    final String testOutputPropertyPath = "testOutputPropertyPath";
-    
-    job = fact.getJob();
-    ret = ToolRunner.run(job, new String[]{ 
+    String testInputPath = "testInputPath";
+    String testOutputPath = "testOutputPath";
+    String testInputPropertyPath = "testInputPropertyPath";
+    String testOutputPropertyPath = "testOutputPropertyPath";
+
+    AbstractJob job = fact.getJob();
+    ret = ToolRunner.run(job, new String[]{
         "--input", testInputPath });
-    TestCase.assertEquals("-1 for missing output option", -1, ret);
-    TestCase.assertEquals("input path is correct", testInputPath, 
+    Assert.assertEquals("-1 for missing output option", -1, ret);
+    Assert.assertEquals("input path is correct", testInputPath,
         job.getInputPath().toString());
     
     job = fact.getJob();
-    ret = ToolRunner.run(job, new String[]{ 
+    ret = ToolRunner.run(job, new String[]{
         "--output", testOutputPath });
-    TestCase.assertEquals("-1 for missing input option", -1, ret);
-    TestCase.assertEquals("output path is correct", testOutputPath, 
+    Assert.assertEquals("-1 for missing input option", -1, ret);
+    Assert.assertEquals("output path is correct", testOutputPath,
         job.getOutputPath().toString());
     
     job = fact.getJob();
-    ret = ToolRunner.run(job, new String[]{ 
+    ret = ToolRunner.run(job, new String[]{
         "--input", testInputPath, "--output", testOutputPath });
-    TestCase.assertEquals("0 for complete options", 0, ret);
-    TestCase.assertEquals("input path is correct", testInputPath, 
+    Assert.assertEquals("0 for complete options", 0, ret);
+    Assert.assertEquals("input path is correct", testInputPath,
         job.getInputPath().toString());
-    TestCase.assertEquals("output path is correct", testOutputPath, 
+    Assert.assertEquals("output path is correct", testOutputPath,
         job.getOutputPath().toString());
     
     job = fact.getJob();
     ret = ToolRunner.run(job, new String[]{ 
         "--input", testInputPath, "--output", testOutputPath });
-    TestCase.assertEquals("0 for complete options", 0, ret);
-    TestCase.assertEquals("input path is correct", testInputPath, 
+    Assert.assertEquals("0 for complete options", 0, ret);
+    Assert.assertEquals("input path is correct", testInputPath,
         job.getInputPath().toString());
-    TestCase.assertEquals("output path is correct", testOutputPath, 
+    Assert.assertEquals("output path is correct", testOutputPath,
         job.getOutputPath().toString());
     
     job = fact.getJob();
     ret = ToolRunner.run(job, new String[]{ 
         "-Dmapred.input.dir=" + testInputPropertyPath, 
         "-Dmapred.output.dir=" + testOutputPropertyPath });
-    TestCase.assertEquals("0 for complete options", 0, ret);
-    TestCase.assertEquals("input path from property is correct", 
+    Assert.assertEquals("0 for complete options", 0, ret);
+    Assert.assertEquals("input path from property is correct",
         testInputPropertyPath, job.getInputPath().toString());
-    TestCase.assertEquals("output path from property is correct", 
+    Assert.assertEquals("output path from property is correct",
         testOutputPropertyPath, job.getOutputPath().toString());
     
     job = fact.getJob();
@@ -246,9 +239,9 @@ public class AbstractJobTest {
         "-Dmapred.output.dir=" + testOutputPropertyPath,
         "--input", testInputPath,
         "--output", testOutputPath });
-    TestCase.assertEquals("input command-line option precedes property", 
+    Assert.assertEquals("input command-line option precedes property",
         testInputPath, job.getInputPath().toString());
-    TestCase.assertEquals("output command-line option precedes property", 
+    Assert.assertEquals("output command-line option precedes property",
         testOutputPath, job.getOutputPath().toString());
 	}
 }

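The assertions above exercise the option-handling contract of AbstractJob: options are declared inside run(), parseArguments() validates them (returning null on invalid input), and getInputPath()/getOutputPath() also honor -Dmapred.input.dir and -Dmapred.output.dir, with the command-line flags taking precedence. A minimal sketch of that pattern, built only from calls that appear in this test (the class name is hypothetical):

    import java.util.Map;
    import org.apache.hadoop.fs.Path;

    public class ExampleJob extends AbstractJob {
      @Override
      public int run(String[] args) throws Exception {
        addInputOption();
        addOutputOption();
        addOption("required", "r", "a required option", true);
        addFlag("testFlag", "t", "a simple test flag");

        Map<String,String> parsedArgs = parseArguments(args);
        if (parsedArgs == null) {
          return -1; // usage has been printed; signal failure to ToolRunner
        }

        Path input = getInputPath();    // from --input or -Dmapred.input.dir
        Path output = getOutputPath();  // from --output or -Dmapred.output.dir
        String required = parsedArgs.get("--required");
        boolean flagSet = parsedArgs.containsKey("--testFlag");
        // ... configure and run the actual job here ...
        return 0;
      }
    }
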
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/common/DummyReporter.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/common/DummyReporter.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/common/DummyReporter.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/common/DummyReporter.java Tue Aug 17 17:34:14 2010
@@ -49,7 +49,7 @@ public class DummyReporter implements Re
   
   @Override
   public void incrCounter(Enum<?> key, long amount) {
-    if (count1.containsKey(key) == false) {
+    if (!count1.containsKey(key)) {
       count1.put(key, new MutableLong(0));
     }
     count1.get(key).add(amount);
@@ -57,10 +57,10 @@ public class DummyReporter implements Re
   
   @Override
   public void incrCounter(String group, String counter, long amount) {
-    if (count2.containsKey(group) == false) {
+    if (!count2.containsKey(group)) {
       count2.put(group, new HashMap<String,MutableLong>());
     }
-    if (count2.get(group).containsKey(counter) == false) {
+    if (!count2.get(group).containsKey(counter)) {
       count2.get(group).put(counter, new MutableLong(0));
     }
     count2.get(group).get(counter).add(amount);

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/common/MahoutTestCase.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/common/MahoutTestCase.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/common/MahoutTestCase.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/common/MahoutTestCase.java Tue Aug 17 17:34:14 2010
@@ -102,11 +102,6 @@ public abstract class MahoutTestCase ext
 
   /**
    * find a declared field in a class or one of its superclasses
-   * 
-   * @param inClass
-   * @param fieldname
-   * @return
-   * @throws NoSuchFieldException
    */
   private Field findDeclaredField(Class<?> inClass, String fieldname) throws NoSuchFieldException {
     if (Object.class.equals(inClass)) {
@@ -121,9 +116,7 @@ public abstract class MahoutTestCase ext
   }
 
   /**
-   * return a job option key string (--name) from the given option name
-   * @param optionName
-   * @return
+   * @return a job option key string (--name) from the given option name
    */
   protected String optKey(String optionName) {
     return AbstractJob.keyFor(optionName);

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DataLoaderTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DataLoaderTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DataLoaderTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DataLoaderTest.java Tue Aug 17 17:34:14 2010
@@ -18,6 +18,7 @@
 package org.apache.mahout.df.data;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Random;
 
@@ -59,7 +60,7 @@ public class DataLoaderTest extends Maho
 
   /**
    * Test method for
-   * {@link org.apache.mahout.df.data.DataLoader#generateDataset(java.lang.String, java.lang.String[])}.
+   * {@link DataLoader#generateDataset(String, String[])}.
    */
   public void testGenerateDataset() throws Exception {
     int nbAttributes = 10;
@@ -83,11 +84,8 @@ public class DataLoaderTest extends Maho
   /**
    * Converts the data to an array of comma-separated strings and adds some
    * missing values in all but IGNORED attributes
-   * 
-   * @param data
-   * @param attrs
+   *
    * @param missings indexes of vectors with missing values
-   * @return
    */
   protected String[] prepareData(double[][] data, Attribute[] attrs, List<Integer> missings) {
     int nbAttributes = attrs.length;
@@ -127,11 +125,8 @@ public class DataLoaderTest extends Maho
 
   /**
    * Test if the loaded data matches the source data
-   * 
-   * @param data
-   * @param attrs
+   *
    * @param missings indexes of instance with missing values
-   * @param loaded
    */
   protected static void testLoadedData(double[][] data, Attribute[] attrs, List<Integer> missings, Data loaded) {
     int nbAttributes = attrs.length;
@@ -176,13 +171,13 @@ public class DataLoaderTest extends Maho
   
   /**
    * Test if the loaded dataset matches the source data
-   * 
-   * @param data
-   * @param attrs
+   *
    * @param missings indexes of instance with missing values
-   * @param loaded
    */
-  protected static void testLoadedDataset(double[][] data, Attribute[] attrs, List<Integer> missings, Data loaded) {
+  protected static void testLoadedDataset(double[][] data,
+                                          Attribute[] attrs,
+                                          Collection<Integer> missings,
+                                          Data loaded) {
     int nbAttributes = attrs.length;
 
     int iId = 0;
@@ -236,7 +231,7 @@ public class DataLoaderTest extends Maho
 
   /**
    * Test method for
-   * {@link org.apache.mahout.df.data.DataLoader#generateDataset(java.lang.String, org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path)}.
+   * {@link DataLoader#generateDataset(String, FileSystem, Path)}.
    */
   public void testGenerateDatasetFromFile() throws Exception {
     int nbAttributes = 10;
@@ -263,16 +258,19 @@ public class DataLoaderTest extends Maho
   /**
    * each time oValue appears in data for the attribute 'attr', the nValue must
    * appear in vectors for the same attribute.
-   * 
-   * @param source
-   * @param loaded
+   *
    * @param attr attribute's index in source
    * @param aId attribute's index in loaded
    * @param oValue old value in source
    * @param nValue new value in loaded
    */
-  protected static void checkCategorical(double[][] source, List<Integer> missings,
-      Data loaded, int attr, int aId, double oValue, double nValue) {
+  protected static void checkCategorical(double[][] source,
+                                         Collection<Integer> missings,
+                                         Data loaded,
+                                         int attr,
+                                         int aId,
+                                         double oValue,
+                                         double nValue) {
     int lind = 0;
 
     for (int index = 0; index < source.length; index++) {
@@ -293,14 +291,15 @@ public class DataLoaderTest extends Maho
   /**
    * each time value appears in data as a label, its corresponding code must
    * appear in all the instances with the same label.
-   * 
-   * @param source
-   * @param loaded
+   *
    * @param labelInd label's index in source
    * @param value source label's value
    */
-  protected static void checkLabel(double[][] source, List<Integer> missings,
-      Data loaded, int labelInd, double value) {
+  protected static void checkLabel(double[][] source,
+                                   Collection<Integer> missings,
+                                   Data loaded,
+                                   int labelInd,
+                                   double value) {
     // label's code that corresponds to the value
     int code = loaded.getDataset().labelCode(Double.toString(value));
 

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DatasetTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DatasetTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DatasetTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/data/DatasetTest.java Tue Aug 17 17:34:14 2010
@@ -31,7 +31,7 @@ import org.apache.mahout.common.RandomUt
 
 public class DatasetTest extends MahoutTestCase {
 
-  private static final int nbAttributes = 10;
+  private static final int NUM_ATTRIBUTES = 10;
 
   private static Dataset readDataset(byte[] bytes) throws IOException {
     ByteArrayInputStream byteInStream = new ByteArrayInputStream(bytes);
@@ -49,7 +49,7 @@ public class DatasetTest extends MahoutT
     for (int nloop=0; nloop< n; nloop++) {
       byteOutStream.reset();
       
-      Dataset dataset = Utils.randomData(rng, nbAttributes, 1).getDataset();
+      Dataset dataset = Utils.randomData(rng, NUM_ATTRIBUTES, 1).getDataset();
       
       dataset.write(out);
       

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/data/Utils.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/data/Utils.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/data/Utils.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/data/Utils.java Tue Aug 17 17:34:14 2010
@@ -30,9 +30,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.mahout.df.callback.PredictionCallback;
 import org.apache.mahout.df.data.Dataset.Attribute;
-import org.slf4j.Logger;
 
 /**
  * Helper methods used by the tests
@@ -42,22 +40,6 @@ public final class Utils {
   private Utils() {
   }
 
-  private static class LogCallback implements PredictionCallback {
-  
-    private static Logger log;
-  
-    private LogCallback(Logger log) {
-      this.log = log;
-    }
-  
-    @Override
-    public void prediction(int treeId, int instanceId, int prediction) {
-      log.info(String.format("treeId:%04d, instanceId:%06d, prediction:%d",
-          treeId, instanceId, prediction));
-    }
-  
-  }
-
   /** Used when generating random CATEGORICAL values */
   private static final int CATEGORICAL_RANGE = 100;
 
@@ -72,7 +54,6 @@ public final class Utils {
    * 
    * @param rng Random number generator
    * @param nbTokens number of tokens to generate
-   * @return
    */
   public static char[] randomTokens(Random rng, int nbTokens) {
     char[] result = new char[nbTokens];
@@ -96,9 +77,6 @@ public final class Utils {
 
   /**
    * Generates a space-separated String that contains all the tokens
-   * 
-   * @param tokens
-   * @return
    */
   public static String generateDescriptor(char[] tokens) {
     StringBuilder builder = new StringBuilder();
@@ -117,9 +95,6 @@ public final class Utils {
    * <li>10% of the attributes are IGNORED</li>
    * <li>one randomly chosen attribute becomes the LABEL</li>
    * </ul>
-   * 
-   * @param nbAttributes
-   * @return
    */
   public static String randomDescriptor(Random rng, int nbAttributes) {
     return generateDescriptor(randomTokens(rng, nbAttributes));
@@ -131,8 +106,6 @@ public final class Utils {
    * @param rng Random number generator
    * @param nbAttributes number of attributes
    * @param number of data lines to generate
-   * @return
-   * @throws Exception 
    */
   public static double[][] randomDoubles(Random rng, int nbAttributes,int number) throws DescriptorException {
     String descriptor = randomDescriptor(rng, nbAttributes);
@@ -172,8 +145,6 @@ public final class Utils {
    * @param rng Random number generator
    * @param nbAttributes number of attributes
    * @param size data size
-   * @return
-   * @throws Exception 
    */
   public static Data randomData(Random rng, int nbAttributes, int size) throws DescriptorException {
     String descriptor = randomDescriptor(rng, nbAttributes);
@@ -194,9 +165,7 @@ public final class Utils {
    * range [0, CATEGORICAL_RANGE[</li>
    * </ul>
    * 
-   * @param rng
    * @param attrs attributes description
-   * @return
    */
   private static double[] randomVector(Random rng, Attribute[] attrs) {
     double[] vector = new double[attrs.length];
@@ -254,8 +223,6 @@ public final class Utils {
    * @param descriptor
    * @param number data size
    * @param value label value
-   * @return
-   * @throws Exception 
    */
   public static double[][] randomDoublesWithSameLabel(Random rng,
       String descriptor, int number, int value) throws DescriptorException {
@@ -272,10 +239,6 @@ public final class Utils {
 
   /**
    * finds the label attribute's index
-   * 
-   * @param descriptor
-   * @return
-   * @throws Exception 
    */
   public static int findLabel(String descriptor) throws DescriptorException {
     Attribute[] attrs = DescriptorUtils.parseDescriptor(descriptor);
@@ -332,10 +295,6 @@ public final class Utils {
 
   /**
    * Split the data into numMaps splits
-   * 
-   * @param sData
-   * @param numMaps
-   * @return
    */
   public static String[][] splitData(String[] sData, int numMaps) {
     int nbInstances = sData.length;

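The helpers in Utils are used throughout the partial-builder tests below; a typical sequence, composed only of calls that appear elsewhere in this commit, is:

    Random rng = RandomUtils.getRandom();
    String descriptor = Utils.randomDescriptor(rng, 10);            // random attribute layout, one LABEL
    double[][] source = Utils.randomDoubles(rng, descriptor, 100);  // 100 random instances
    String[] sData = Utils.double2String(source);                   // CSV lines, as a mapper would read them
    Dataset dataset = DataLoader.generateDataset(descriptor, sData);
    Data data = DataLoader.loadData(dataset, sData);
    String[][] splits = Utils.splitData(sData, 5);                  // partition the lines across 5 mappers
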
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/InterResultsTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/InterResultsTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/InterResultsTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/InterResultsTest.java Tue Aug 17 17:34:14 2010
@@ -31,16 +31,16 @@ import org.apache.mahout.df.node.Node;
 public class InterResultsTest extends MahoutTestCase {
 
   /** nb attributes per generated data instance */
-  private static final int nbAttributes = 4;
+  private static final int NUM_ATTRIBUTES = 4;
 
   /** nb generated data instances */
-  private static final int nbInstances = 100;
+  private static final int NUM_INSTANCES = 100;
 
   /** nb trees to build */
-  private static final int nbTrees = 11;
+  private static final int NUM_TREES = 11;
 
   /** nb mappers to use */
-  private static final int nbMappers = 5;
+  private static final int NUM_MAPPERS = 5;
 
   private String[][] splits;
 
@@ -56,23 +56,23 @@ public class InterResultsTest extends Ma
     Random rng = RandomUtils.getRandom();
 
     // prepare the data
-    double[][] source = Utils.randomDoubles(rng, nbAttributes, nbInstances);
+    double[][] source = Utils.randomDoubles(rng, NUM_ATTRIBUTES, NUM_INSTANCES);
     String[] sData = Utils.double2String(source);
 
-    splits = Utils.splitData(sData, nbMappers);
+    splits = Utils.splitData(sData, NUM_MAPPERS);
 
-    sizes = new int[nbMappers];
-    for (int p = 0; p < nbMappers; p++) {
+    sizes = new int[NUM_MAPPERS];
+    for (int p = 0; p < NUM_MAPPERS; p++) {
       sizes[p] = splits[p].length;
     }
 
     // prepare first step output
-    keys = new TreeID[nbTrees];
-    trees = new Node[nbTrees];
+    keys = new TreeID[NUM_TREES];
+    trees = new Node[NUM_TREES];
 
     int treeIndex = 0;
-    for (int partition = 0; partition < nbMappers; partition++) {
-      int nbMapTrees = Step1Mapper.nbTrees(nbMappers, nbTrees, partition);
+    for (int partition = 0; partition < NUM_MAPPERS; partition++) {
+      int nbMapTrees = Step1Mapper.nbTrees(NUM_MAPPERS, NUM_TREES, partition);
 
       for (int index = 0; index < nbMapTrees; index++, treeIndex++) {
         keys[treeIndex] = new TreeID(partition, treeIndex);
@@ -91,21 +91,21 @@ public class InterResultsTest extends Ma
 
     InterResults.store(fs, forestPath, keys, trees, sizes);
 
-    for (int partition = 0; partition < nbMappers; partition++) {
-      int nbConcerned = Step2Mapper.nbConcerned(nbMappers, nbTrees, partition);
+    for (int partition = 0; partition < NUM_MAPPERS; partition++) {
+      int nbConcerned = Step2Mapper.nbConcerned(NUM_MAPPERS, NUM_TREES, partition);
 
       TreeID[] newKeys = new TreeID[nbConcerned];
       Node[] newValues = new Node[nbConcerned];
 
-      int numInstances = InterResults.load(fs, forestPath, nbMappers,
-          nbTrees, partition, newKeys, newValues);
+      int numInstances = InterResults.load(fs, forestPath, NUM_MAPPERS,
+          NUM_TREES, partition, newKeys, newValues);
 
       // verify the partition's size
       assertEquals(splits[partition].length, numInstances);
 
       // verify (key, tree)
       int current = 0;
-      for (int index = 0; index < nbTrees; index++) {
+      for (int index = 0; index < NUM_TREES; index++) {
         // the trees of the current partition should not be loaded
         if (current < nbConcerned) {
           assertFalse("A tree from the current partition has been loaded",
@@ -136,13 +136,13 @@ public class InterResultsTest extends Ma
 
     try {
       // partitions' sizes
-      for (int p = 0; p < nbMappers; p++) {
+      for (int p = 0; p < NUM_MAPPERS; p++) {
         assertEquals(splits[p].length, in.readInt());
       }
 
       // load (key, tree)
       TreeID key = new TreeID();
-      for (int index = 0; index < nbTrees; index++) {
+      for (int index = 0; index < NUM_TREES; index++) {
         key.readFields(in);
         Node value = Node.read(in);
 

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/MockContext.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/MockContext.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/MockContext.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/MockContext.java Tue Aug 17 17:34:14 2010
@@ -57,9 +57,7 @@ class MockContext extends Context {
   }
 
   /**
-   * Number of outputs collected
-   * 
-   * @return
+   * @return number of outputs collected
    */
   public int nbOutputs() {
     return index;

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialBuilderTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialBuilderTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialBuilderTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialBuilderTest.java Tue Aug 17 17:34:14 2010
@@ -41,23 +41,23 @@ import org.apache.mahout.df.node.Node;
 
 public class PartialBuilderTest extends MahoutTestCase {
 
-  private static final int numMaps = 5;
+  private static final int NUM_MAPS = 5;
 
-  private static final int numTrees = 32;
+  private static final int NUM_TREES = 32;
 
   /** instances per partition */
   private static final int numInstances = 20;
   
   public void testProcessOutput() throws Exception {
     Configuration conf = new Configuration();
-    conf.setInt("mapred.map.tasks", numMaps);
+    conf.setInt("mapred.map.tasks", NUM_MAPS);
 
     Random rng = RandomUtils.getRandom();
 
     // prepare the output
-    TreeID[] keys = new TreeID[numTrees];
-    MapredOutput[] values = new MapredOutput[numTrees];
-    int[] firstIds = new int[numMaps];
+    TreeID[] keys = new TreeID[NUM_TREES];
+    MapredOutput[] values = new MapredOutput[NUM_TREES];
+    int[] firstIds = new int[NUM_MAPS];
     randomKeyValues(rng, keys, values, firstIds);
 
     // store the output in a sequence file
@@ -68,20 +68,20 @@ public class PartialBuilderTest extends 
     Writer writer = SequenceFile.createWriter(fs, conf, outputFile,
         TreeID.class, MapredOutput.class);
 
-    for (int index = 0; index < numTrees; index++) {
+    for (int index = 0; index < NUM_TREES; index++) {
       writer.append(keys[index], values[index]);
     }
     writer.close();
 
     // load the output and make sure its valid
-    TreeID[] newKeys = new TreeID[numTrees];
-    Node[] newTrees = new Node[numTrees];
+    TreeID[] newKeys = new TreeID[NUM_TREES];
+    Node[] newTrees = new Node[NUM_TREES];
     
     PartialBuilder.processOutput(new Job(conf), base, firstIds, newKeys, newTrees, 
         new TestCallback(keys, values));
 
     // check the forest
-    for (int tree = 0; tree < numTrees; tree++) {
+    for (int tree = 0; tree < NUM_TREES; tree++) {
       assertEquals(values[tree].getTree(), newTrees[tree]);
     }
 
@@ -115,16 +115,16 @@ public class PartialBuilderTest extends 
     int firstId = 0;
     List<Integer> partitions = new ArrayList<Integer>();
 
-    for (int p = 0; p < numMaps; p++) {
+    for (int p = 0; p < NUM_MAPS; p++) {
       // select a random partition, not yet selected
       int partition;
       do {
-        partition = rng.nextInt(numMaps);
+        partition = rng.nextInt(NUM_MAPS);
       } while (partitions.contains(partition));
 
       partitions.add(partition);
 
-      int nbTrees = Step1Mapper.nbTrees(numMaps, numTrees, partition);
+      int nbTrees = Step1Mapper.nbTrees(NUM_MAPS, NUM_TREES, partition);
 
       for (int treeId = 0; treeId < nbTrees; treeId++) {
         Node tree = new Leaf(rng.nextInt(100));
@@ -179,7 +179,7 @@ public class PartialBuilderTest extends 
       // of map tasks
       assertEquals(1, conf.getInt("mapred.map.tasks", -1));
 
-      assertEquals(numTrees, getNbTrees(conf));
+      assertEquals(NUM_TREES, getNbTrees(conf));
 
       assertFalse(isOutput(conf));
       assertTrue(isOobEstimate(conf));

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialSequentialBuilder.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialSequentialBuilder.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialSequentialBuilder.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartialSequentialBuilder.java Tue Aug 17 17:34:14 2010
@@ -178,11 +178,6 @@ public class PartialSequentialBuilder ex
 
   /**
    * extract the decision forest and call the callback after correcting the instance ids
-   * 
-   * @param keys
-   * @param values
-   * @param callback
-   * @return
    */
   protected DecisionForest processOutput(TreeID[] keys, MapredOutput[] values, PredictionCallback callback) {
     List<Node> trees = new ArrayList<Node>();

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartitionBugTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartitionBugTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartitionBugTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/PartitionBugTest.java Tue Aug 17 17:34:14 2010
@@ -38,13 +38,13 @@ import org.apache.mahout.df.data.Utils;
 import org.apache.mahout.df.node.Node;
 
 public class PartitionBugTest extends MahoutTestCase {
-  static final int numAttributes = 40;
+  static final int NUM_ATTRIBUTES = 40;
 
-  static final int numInstances = 200;
+  static final int NUM_INSTANCES = 200;
 
-  static final int numTrees = 10;
+  static final int NUM_TREES = 10;
 
-  static final int numMaps = 5;
+  static final int NUM_MAPS = 5;
     
   /**
    * Make sure that the correct instance ids are being computed
@@ -54,12 +54,12 @@ public class PartitionBugTest extends Ma
     //long seed = rng.nextLong();
 
     // create a dataset large enough to be split up
-    String descriptor = Utils.randomDescriptor(rng, numAttributes);
-    double[][] source = Utils.randomDoubles(rng, descriptor, numInstances);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
+    double[][] source = Utils.randomDoubles(rng, descriptor, NUM_INSTANCES);
 
     // each instance label is its index in the dataset
     int labelId = Utils.findLabel(descriptor);
-    for (int index = 0; index < numInstances; index++) {
+    for (int index = 0; index < NUM_INSTANCES; index++) {
       source[index][labelId] = index;
     }
 
@@ -70,7 +70,7 @@ public class PartitionBugTest extends Ma
     Data data = DataLoader.loadData(dataset, sData);
 
     Configuration conf = new Configuration();
-    Step0JobTest.setMaxSplitSize(conf, dataPath, numMaps);
+    Step0JobTest.setMaxSplitSize(conf, dataPath, NUM_MAPS);
 
     // prepare a custom TreeBuilder that will classify each
     // instance with its own label (in this case its index in the dataset)
@@ -87,7 +87,7 @@ public class PartitionBugTest extends Ma
     Path outputPath = builder.getOutputPath(conf);
     HadoopUtil.overwriteOutput(outputPath);
 
-    builder.build(numTrees, new MockCallback(data));
+    builder.build(NUM_TREES, new MockCallback(data));
   }
 
   /**

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step0JobTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step0JobTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step0JobTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step0JobTest.java Tue Aug 17 17:34:14 2010
@@ -53,13 +53,13 @@ public class Step0JobTest extends Mahout
 
   // the generated data must be big enough to be split by FileInputFormat
 
-  private static final int numAttributes = 40;
+  private static final int NUM_ATTRIBUTES = 40;
 
-  private static final int numInstances = 2000;
+  private static final int NUM_INSTANCES = 2000;
 
   //int numTrees = 10;
 
-  private static final int numMaps = 5;
+  private static final int NUM_MAPS = 5;
 
   /**
    * Computes the "mapred.max.split.size" that will generate the desired number
@@ -82,8 +82,8 @@ public class Step0JobTest extends Mahout
     Random rng = RandomUtils.getRandom();
 
     // create a dataset large enough to be split up
-    String descriptor = Utils.randomDescriptor(rng, numAttributes);
-    double[][] source = Utils.randomDoubles(rng, descriptor, numInstances);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
+    double[][] source = Utils.randomDoubles(rng, descriptor, NUM_INSTANCES);
     String[] sData = Utils.double2String(source);
 
     // write the data to a file
@@ -93,21 +93,21 @@ public class Step0JobTest extends Mahout
     job.setInputFormatClass(TextInputFormat.class);
     FileInputFormat.setInputPaths(job, dataPath);
 
-    setMaxSplitSize(job.getConfiguration(), dataPath, numMaps);
+    setMaxSplitSize(job.getConfiguration(), dataPath, NUM_MAPS);
 
     // retrieve the splits
     TextInputFormat input = new TextInputFormat();
     List<InputSplit> splits = input.getSplits(job);
-    assertEquals(numMaps, splits.size());
+    assertEquals(NUM_MAPS, splits.size());
 
-    InputSplit[] sorted = new InputSplit[numMaps];
+    InputSplit[] sorted = new InputSplit[NUM_MAPS];
     splits.toArray(sorted);
     Builder.sortSplits(sorted);
 
     Step0Context context = new Step0Context(new Step0Mapper(), job.getConfiguration(),
-                                            new TaskAttemptID(), numMaps);
+                                            new TaskAttemptID(), NUM_MAPS);
 
-    for (int p = 0; p < numMaps; p++) {
+    for (int p = 0; p < NUM_MAPS; p++) {
       InputSplit split = sorted[p];
 
       RecordReader<LongWritable, Text> reader = input.createRecordReader(split,
@@ -146,12 +146,12 @@ public class Step0JobTest extends Mahout
     Random rng = RandomUtils.getRandom();
 
     // create a dataset large enough to be split up
-    String descriptor = Utils.randomDescriptor(rng, numAttributes);
-    double[][] source = Utils.randomDoubles(rng, descriptor, numInstances);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
+    double[][] source = Utils.randomDoubles(rng, descriptor, NUM_INSTANCES);
 
     // each instance label is its index in the dataset
     int labelId = Utils.findLabel(descriptor);
-    for (int index = 0; index < numInstances; index++) {
+    for (int index = 0; index < NUM_INSTANCES; index++) {
       source[index][labelId] = index;
     }
 
@@ -168,26 +168,26 @@ public class Step0JobTest extends Mahout
     job.setInputFormatClass(TextInputFormat.class);
     FileInputFormat.setInputPaths(job, dataPath);
 
-    setMaxSplitSize(job.getConfiguration(), dataPath, numMaps);
+    setMaxSplitSize(job.getConfiguration(), dataPath, NUM_MAPS);
 
     // retrieve the splits
     TextInputFormat input = new TextInputFormat();
     List<InputSplit> splits = input.getSplits(job);
-    assertEquals(numMaps, splits.size());
+    assertEquals(NUM_MAPS, splits.size());
 
-    InputSplit[] sorted = new InputSplit[numMaps];
+    InputSplit[] sorted = new InputSplit[NUM_MAPS];
     splits.toArray(sorted);
     Builder.sortSplits(sorted);
 
     List<Integer> keys = new ArrayList<Integer>();
     List<Step0Output> values = new ArrayList<Step0Output>();
 
-    int[] expectedIds = new int[numMaps];
+    int[] expectedIds = new int[NUM_MAPS];
 
     TaskAttemptContext context = new TaskAttemptContext(job.getConfiguration(),
         new TaskAttemptID());
 
-    for (int p = 0; p < numMaps; p++) {
+    for (int p = 0; p < NUM_MAPS; p++) {
       InputSplit split = sorted[p];
       RecordReader<LongWritable, Text> reader = input.createRecordReader(split,
           context);
@@ -251,9 +251,7 @@ public class Step0JobTest extends Mahout
     }
 
     /**
-     * Number of outputs collected
-     * 
-     * @return
+     * @return number of outputs collected
      */
     public int nbOutputs() {
       return index;

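The setMaxSplitSize() helper referenced above (and reused by PartitionBugTest) computes a "mapred.max.split.size" that makes FileInputFormat produce the desired number of splits. Its body is not part of this diff, so the following is only a plausible sketch of such a helper, not the actual implementation:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // assumption: split size = input file length / desired number of map tasks
    public static void setMaxSplitSize(Configuration conf, Path dataPath, int numMaps) throws IOException {
      FileSystem fs = dataPath.getFileSystem(conf);
      long fileLength = fs.getFileStatus(dataPath).getLen();
      conf.setLong("mapred.max.split.size", fileLength / numMaps);
    }
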
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step1MapperTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step1MapperTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step1MapperTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step1MapperTest.java Tue Aug 17 17:34:14 2010
@@ -71,27 +71,27 @@ public class Step1MapperTest extends Mah
   }
 
   /** nb attributes per generated data instance */
-  protected static final int nbAttributes = 4;
+  protected static final int NUM_ATTRIBUTES = 4;
 
   /** nb generated data instances */
-  protected static final int nbInstances = 100;
+  protected static final int NUM_INSTANCES = 100;
 
   /** nb trees to build */
-  protected static final int nbTrees = 10;
+  protected static final int NUM_TREES = 10;
 
   /** nb mappers to use */
-  protected static final int nbMappers = 2;
+  protected static final int NUM_MAPPERS = 2;
            
   public void testMapper() throws Exception {
     Long seed = null;
     Random rng = RandomUtils.getRandom();
 
     // prepare the data
-    String descriptor = Utils.randomDescriptor(rng, nbAttributes);
-    double[][] source = Utils.randomDoubles(rng, descriptor, nbInstances);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
+    double[][] source = Utils.randomDoubles(rng, descriptor, NUM_INSTANCES);
     String[] sData = Utils.double2String(source);
     Dataset dataset = DataLoader.generateDataset(descriptor, sData);
-    String[][] splits = Utils.splitData(sData, nbMappers);
+    String[][] splits = Utils.splitData(sData, NUM_MAPPERS);
 
     MockTreeBuilder treeBuilder = new MockTreeBuilder();
 
@@ -100,18 +100,18 @@ public class Step1MapperTest extends Mah
 
     int treeIndex = 0;
 
-    for (int partition = 0; partition < nbMappers; partition++) {
+    for (int partition = 0; partition < NUM_MAPPERS; partition++) {
       String[] split = splits[partition];
       treeBuilder.setExpected(DataLoader.loadData(dataset, split));
 
       // expected number of trees that this mapper will build
-      int mapNbTrees = Step1Mapper.nbTrees(nbMappers, nbTrees, partition);
+      int mapNbTrees = Step1Mapper.nbTrees(NUM_MAPPERS, NUM_TREES, partition);
 
       MockContext context = new MockContext(new Step1Mapper(),
           new Configuration(), new TaskAttemptID(), mapNbTrees);
 
       MockStep1Mapper mapper = new MockStep1Mapper(treeBuilder, dataset, seed,
-          partition, nbMappers, nbTrees);
+          partition, NUM_MAPPERS, NUM_TREES);
 
       // make sure the mapper computed firstTreeId correctly
       assertEquals(treeIndex, mapper.getFirstTreeId());

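The assertions above rely on Step1Mapper.nbTrees() to split NUM_TREES trees across NUM_MAPPERS partitions and on getFirstTreeId() to give every mapper a disjoint, contiguous block of tree ids. A rough sketch of that kind of partitioning, offered only as an illustration (the remainder handling here is an assumption and may differ from Mahout's actual implementation):

// Illustrative split of NUM_TREES trees over NUM_MAPPERS partitions.
public final class TreePartitioningSketch {

  private TreePartitioningSketch() {
  }

  /** number of trees a given partition builds (any remainder goes to partition 0) */
  static int nbTrees(int numMappers, int numTrees, int partition) {
    int treesPerMapper = numTrees / numMappers;
    if (partition == 0) {
      treesPerMapper += numTrees - treesPerMapper * numMappers;
    }
    return treesPerMapper;
  }

  /** id of the first tree built by a given partition */
  static int firstTreeId(int numMappers, int numTrees, int partition) {
    int firstId = 0;
    for (int p = 0; p < partition; p++) {
      firstId += nbTrees(numMappers, numTrees, p);
    }
    return firstId;
  }

  public static void main(String[] args) {
    // with 2 mappers and 10 trees each mapper builds 5 trees,
    // so the second mapper's first tree id is 5
    System.out.println(nbTrees(2, 10, 0));      // 5
    System.out.println(firstTreeId(2, 10, 1));  // 5
  }
}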
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step2MapperTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step2MapperTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step2MapperTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/mapreduce/partial/Step2MapperTest.java Tue Aug 17 17:34:14 2010
@@ -49,35 +49,35 @@ public class Step2MapperTest extends Mah
   }
 
   /** nb attributes per generated data instance */
-  protected static final int nbAttributes = 4;
+  protected static final int NUM_ATTRIBUTES = 4;
 
   /** nb generated data instances */
-  protected static final int nbInstances = 100;
+  protected static final int NUM_INSTANCES = 100;
 
   /** nb trees to build */
-  protected static final int nbTrees = 11;
+  protected static final int NUM_TREES = 11;
 
   /** nb mappers to use */
-  protected static final int nbMappers = 5;
+  protected static final int NUM_MAPPERS = 5;
  
   public void testMapper() throws Exception {
     Random rng = RandomUtils.getRandom();
 
     // prepare the data
-    String descriptor = Utils.randomDescriptor(rng, nbAttributes);
-    double[][] source = Utils.randomDoubles(rng, descriptor, nbInstances);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
+    double[][] source = Utils.randomDoubles(rng, descriptor, NUM_INSTANCES);
     String[] sData = Utils.double2String(source);
     Dataset dataset = DataLoader.generateDataset(descriptor, sData);
-    String[][] splits = Utils.splitData(sData, nbMappers);
+    String[][] splits = Utils.splitData(sData, NUM_MAPPERS);
 
     // prepare first step output
-    TreeID[] keys = new TreeID[nbTrees];
-    Node[] trees = new Node[nbTrees];
-    int[] sizes = new int[nbMappers];
+    TreeID[] keys = new TreeID[NUM_TREES];
+    Node[] trees = new Node[NUM_TREES];
+    int[] sizes = new int[NUM_MAPPERS];
 
     int treeIndex = 0;
-    for (int partition = 0; partition < nbMappers; partition++) {
-      int nbMapTrees = Step1Mapper.nbTrees(nbMappers, nbTrees, partition);
+    for (int partition = 0; partition < NUM_MAPPERS; partition++) {
+      int nbMapTrees = Step1Mapper.nbTrees(NUM_MAPPERS, NUM_TREES, partition);
 
       for (int tree = 0; tree < nbMapTrees; tree++, treeIndex++) {
         keys[treeIndex] = new TreeID(partition, treeIndex);
@@ -97,11 +97,11 @@ public class Step2MapperTest extends Mah
     LongWritable key = new LongWritable();
     Text value = new Text();
 
-    for (int partition = 0; partition < nbMappers; partition++) {
+    for (int partition = 0; partition < NUM_MAPPERS; partition++) {
       String[] split = splits[partition];
 
       // number of trees that will be handled by the mapper
-      int nbConcerned = Step2Mapper.nbConcerned(nbMappers, nbTrees, partition);
+      int nbConcerned = Step2Mapper.nbConcerned(NUM_MAPPERS, NUM_TREES, partition);
 
       MockContext context = new MockContext(new Step2Mapper(),
           new Configuration(), new TaskAttemptID(), nbConcerned);
@@ -109,7 +109,7 @@ public class Step2MapperTest extends Mah
       // load the current mapper's (key, tree) pairs
       TreeID[] curKeys = new TreeID[nbConcerned];
       Node[] curTrees = new Node[nbConcerned];
-      InterResults.load(fs, forestPath, nbMappers, nbTrees, partition, curKeys,
+      InterResults.load(fs, forestPath, NUM_MAPPERS, NUM_TREES, partition, curKeys,
           curTrees);
 
       // simulate the job
@@ -129,7 +129,7 @@ public class Step2MapperTest extends Mah
 
       // check the returned results
       int current = 0;
-      for (int index = 0; index < nbTrees; index++) {
+      for (int index = 0; index < NUM_TREES; index++) {
         if (keys[index].partition() == partition) {
           // should not be part of the results
           continue;

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/split/DefaultIgSplitTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/split/DefaultIgSplitTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/split/DefaultIgSplitTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/split/DefaultIgSplitTest.java Tue Aug 17 17:34:14 2010
@@ -28,11 +28,11 @@ import org.apache.mahout.df.data.Utils;
 
 public class DefaultIgSplitTest extends MahoutTestCase {
 
-  private static final int nbAttributes = 10;
+  private static final int NUM_ATTRIBUTES = 10;
    
   public void testEntropy() throws Exception {
     Random rng = RandomUtils.getRandom();
-    String descriptor = Utils.randomDescriptor(rng, nbAttributes);
+    String descriptor = Utils.randomDescriptor(rng, NUM_ATTRIBUTES);
     int label = Utils.findLabel(descriptor);
 
     // all the vectors have the same label (0)

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/df/split/OptIgSplitTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/df/split/OptIgSplitTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/df/split/OptIgSplitTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/df/split/OptIgSplitTest.java Tue Aug 17 17:34:14 2010
@@ -26,9 +26,9 @@ import org.apache.mahout.df.data.Utils;
 
 public class OptIgSplitTest extends MahoutTestCase {
 
-  private static final int nbAttributes = 20;
+  private static final int NUM_ATTRIBUTES = 20;
 
-  private static final int numInstances = 100;
+  private static final int NUM_INSTANCES = 100;
  
   public void testComputeSplit() throws Exception {
     int n = 100;
@@ -37,7 +37,7 @@ public class OptIgSplitTest extends Maho
     IgSplit opt = new OptIgSplit();
 
     Random rng = RandomUtils.getRandom();
-    Data data = Utils.randomData(rng, nbAttributes, numInstances);
+    Data data = Utils.randomData(rng, NUM_ATTRIBUTES, NUM_INSTANCES);
 
     for (int nloop = 0; nloop < n; nloop++) {
       int attr = rng.nextInt(data.getDataset().nbAttributes());

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/MathHelper.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/MathHelper.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/MathHelper.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/MathHelper.java Tue Aug 17 17:34:14 2010
@@ -31,7 +31,6 @@ import org.apache.mahout.math.Matrix;
 import org.apache.mahout.math.RandomAccessSparseVector;
 import org.apache.mahout.math.Vector;
 import org.apache.mahout.math.VectorWritable;
-import org.apache.mahout.math.Vector.Element;
 import org.apache.mahout.math.hadoop.DistributedRowMatrix.MatrixEntryWritable;
 import org.easymock.IArgumentMatcher;
 import org.easymock.classextension.EasyMock;
@@ -42,18 +41,13 @@ import org.easymock.classextension.EasyM
 public class MathHelper {
 
   /** the "close enough" value for floating point computations */
-  public static final double EPSILON = 0.00001d;
+  public static final double EPSILON = 0.00001;
 
   private MathHelper() {
   }
 
   /**
    * applies an {@link IArgumentMatcher} to {@link MatrixEntryWritable}s
-   *
-   * @param row
-   * @param col
-   * @param value
-   * @return
    */
   public static MatrixEntryWritable matrixEntryMatches(final int row, final int col, final double value) {
     EasyMock.reportMatcher(new IArgumentMatcher() {
@@ -74,11 +68,6 @@ public class MathHelper {
 
   /**
    * convenience method to create a {@link MatrixEntryWritable}
-   *
-   * @param row
-   * @param col
-   * @param value
-   * @return
    */
   public static MatrixEntryWritable matrixEntry(int row, int col, double value) {
     MatrixEntryWritable entry = new MatrixEntryWritable();
@@ -89,20 +78,16 @@ public class MathHelper {
   }
 
   /**
-   * convenience method to create a {@link Element}
-   *
-   * @param index
-   * @param value
-   * @return
+   * convenience method to create a {@link Vector.Element}
    */
-  public static Element elem(int index, double value) {
+  public static Vector.Element elem(int index, double value) {
     return new ElementToCheck(index, value);
   }
 
   /**
-   * a simple implementation of {@link Element}
+   * a simple implementation of {@link Vector.Element}
    */
-  static class ElementToCheck implements Element {
+  static class ElementToCheck implements Vector.Element {
     private final int index;
     private double value;
 
@@ -126,11 +111,8 @@ public class MathHelper {
 
   /**
    * applies an {@link IArgumentMatcher} to a {@link VectorWritable} that checks whether all elements are included
-   *
-   * @param elements
-   * @return
    */
-  public static VectorWritable vectorMatches(final Element... elements) {
+  public static VectorWritable vectorMatches(final Vector.Element... elements) {
     EasyMock.reportMatcher(new IArgumentMatcher() {
       @Override
       public boolean matches(Object argument) {
@@ -148,11 +130,7 @@ public class MathHelper {
   }
 
   /**
-   * checks whether the {@link Vector} is equivalent to the set of {@link Vector.Element}s 
-   * 
-   * @param vector
-   * @param elements
-   * @return
+   * checks whether the {@link Vector} is equivalent to the set of {@link Vector.Element}s
    */
   public static boolean consistsOf(Vector vector, Vector.Element... elements) {
     if (elements.length != numberOfNoNZeroNonNaNElements(vector)) {
@@ -169,15 +147,12 @@ public class MathHelper {
   
   /**
    * returns the number of elements in the {@link Vector} that are neither 0 nor NaN
-   * 
-   * @param vector
-   * @return
    */
   public static int numberOfNoNZeroNonNaNElements(Vector vector) {
     int elementsInVector = 0;
-    Iterator<Element> vectorIterator = vector.iterateNonZero();
+    Iterator<Vector.Element> vectorIterator = vector.iterateNonZero();
     while (vectorIterator.hasNext()) {
-      Element currentElement = vectorIterator.next();
+      Vector.Element currentElement = vectorIterator.next();
       if (!Double.isNaN(currentElement.get())) {
         elementsInVector++;
       }      
@@ -187,13 +162,6 @@ public class MathHelper {
   
   /**
    * read a {@link Matrix} from a SequenceFile<IntWritable,VectorWritable>
-   * @param fs
-   * @param conf
-   * @param path
-   * @param rows
-   * @param columns
-   * @return
-   * @throws IOException
    */
   public static Matrix readEntries(FileSystem fs, Configuration conf, Path path, int rows, int columns)
       throws IOException {
@@ -207,9 +175,9 @@ public class MathHelper {
       VectorWritable value = new VectorWritable();
       while (reader.next(key, value)) {
         int row = key.get();
-        Iterator<Element> elementsIterator = value.get().iterateNonZero();
+        Iterator<Vector.Element> elementsIterator = value.get().iterateNonZero();
         while (elementsIterator.hasNext()) {
-          Element element = elementsIterator.next();
+          Vector.Element element = elementsIterator.next();
           matrix.set(row, element.index(), element.get());
         }
       }
@@ -221,12 +189,6 @@ public class MathHelper {
 
   /**
    * write a two-dimensional double array to an SequenceFile<IntWritable,VectorWritable>
-   *
-   * @param entries
-   * @param fs
-   * @param conf
-   * @param path
-   * @throws IOException
    */
   public static void writeEntries(double[][] entries, FileSystem fs, Configuration conf, Path path)
       throws IOException {

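For readers unfamiliar with the pattern behind matrixEntryMatches() and vectorMatches() above: EasyMock custom matchers are registered with reportMatcher(), and the helper returns a placeholder value that only marks the argument position in the recorded expectation. A minimal self-contained sketch of the same pattern (the startsWith() helper below is invented for illustration and is not part of this commit):

import org.easymock.EasyMock;
import org.easymock.IArgumentMatcher;

public final class MatcherSketch {

  private MatcherSketch() {
  }

  /** matches any String argument that starts with the given prefix */
  static String startsWith(final String prefix) {
    EasyMock.reportMatcher(new IArgumentMatcher() {
      @Override
      public boolean matches(Object argument) {
        return argument instanceof String && ((String) argument).startsWith(prefix);
      }
      @Override
      public void appendTo(StringBuffer buffer) {
        buffer.append("startsWith(").append(prefix).append(')');
      }
    });
    // the return value is ignored; EasyMock associates the reported matcher
    // with this argument position of the next recorded call
    return null;
  }
}

An expectation is then recorded as mock.doSomething(startsWith("item")), replayed and verified, exactly like the context.write(...) expectations in the tests of this commit (note that once one argument uses a matcher, the others must be wrapped too, e.g. with EasyMock.eq(...)).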
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/TestRowSimilarityJob.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/TestRowSimilarityJob.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/TestRowSimilarityJob.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/TestRowSimilarityJob.java Tue Aug 17 17:34:14 2010
@@ -49,22 +49,20 @@ import org.easymock.classextension.EasyM
 public class TestRowSimilarityJob extends MahoutTestCase {
 
   /**
-   * @tests {@link RowSimilarityJob.RowWeightMapper}
-   *
-   * @throws Exception
+   * Tests {@link RowSimilarityJob.RowWeightMapper}
    */
   public void testRowWeightMapper() throws Exception {
     Mapper<IntWritable,VectorWritable,VarIntWritable,WeightedOccurrence>.Context context =
       EasyMock.createMock(Mapper.Context.class);
 
-    context.write(new VarIntWritable(456), new WeightedOccurrence(123, 0.5d, 2.0d));
-    context.write(new VarIntWritable(789), new WeightedOccurrence(123, 0.1d, 2.0d));
+    context.write(new VarIntWritable(456), new WeightedOccurrence(123, 0.5, 2.0));
+    context.write(new VarIntWritable(789), new WeightedOccurrence(123, 0.1, 2.0));
 
     EasyMock.replay(context);
 
     Vector vector = new RandomAccessSparseVector(Integer.MAX_VALUE);
-    vector.set(456, 0.5d);
-    vector.set(789, 0.1d);
+    vector.set(456, 0.5);
+    vector.set(789, 0.1);
 
     RowSimilarityJob.RowWeightMapper mapper = new RowSimilarityJob.RowWeightMapper();
     setField(mapper, "similarity", new DistributedTanimotoCoefficientVectorSimilarity());
@@ -75,14 +73,12 @@ public class TestRowSimilarityJob extend
   }
 
   /**
-   * @tests {@link RowSimilarityJob.WeightedOccurrencesPerColumnReducer}
-   *
-   * @throws Exception
+   * Tests {@link RowSimilarityJob.WeightedOccurrencesPerColumnReducer}
    */
   public void testWeightedOccurrencesPerColumnReducer() throws Exception {
 
-    List<WeightedOccurrence> weightedOccurrences = Arrays.asList(new WeightedOccurrence(45, 0.5d, 1.0d),
-        new WeightedOccurrence(78, 3.0d, 9.0d));
+    List<WeightedOccurrence> weightedOccurrences = Arrays.asList(new WeightedOccurrence(45, 0.5, 1.0),
+        new WeightedOccurrence(78, 3.0, 9.0));
 
     Reducer<VarIntWritable,WeightedOccurrence,VarIntWritable,WeightedOccurrenceArray>.Context context =
       EasyMock.createMock(Reducer.Context.class);
@@ -100,8 +96,6 @@ public class TestRowSimilarityJob extend
   /**
    * applies an {@link IArgumentMatcher} to a {@link WeightedOccurrenceArray} that checks whether
    * it matches all {@link WeightedOccurrence}
-   *
-   * @throws Exception
    */
   static WeightedOccurrenceArray weightedOccurrenceArrayMatches(
       final Collection<WeightedOccurrence> occurrencesToMatch) {
@@ -130,22 +124,20 @@ public class TestRowSimilarityJob extend
   }
 
   /**
-   * @tests {@link RowSimilarityJob.CooccurrencesMapper}
-   *
-   * @throws Exception
+   * Tests {@link RowSimilarityJob.CooccurrencesMapper}
    */
   public void testCooccurrencesMapper() throws Exception {
     Mapper<VarIntWritable,WeightedOccurrenceArray,WeightedRowPair,Cooccurrence>.Context context =
       EasyMock.createMock(Mapper.Context.class);
 
-    context.write(new WeightedRowPair(34, 34, 1.0d, 1.0d), new Cooccurrence(12, 0.5d, 0.5d));
-    context.write(new WeightedRowPair(34, 56, 1.0d, 3.0d), new Cooccurrence(12, 0.5d, 1.0d));
-    context.write(new WeightedRowPair(56, 56, 3.0d, 3.0d), new Cooccurrence(12, 1.0d, 1.0d));
+    context.write(new WeightedRowPair(34, 34, 1.0, 1.0), new Cooccurrence(12, 0.5, 0.5));
+    context.write(new WeightedRowPair(34, 56, 1.0, 3.0), new Cooccurrence(12, 0.5, 1.0));
+    context.write(new WeightedRowPair(56, 56, 3.0, 3.0), new Cooccurrence(12, 1.0, 1.0));
 
     EasyMock.replay(context);
 
     WeightedOccurrenceArray weightedOccurrences = new WeightedOccurrenceArray(new WeightedOccurrence[] {
-        new WeightedOccurrence(34, 0.5d, 1.0d), new WeightedOccurrence(56, 1.0d, 3.0d) });
+        new WeightedOccurrence(34, 0.5, 1.0), new WeightedOccurrence(56, 1.0, 3.0) });
 
     new RowSimilarityJob.CooccurrencesMapper().map(new VarIntWritable(12), weightedOccurrences, context);
 
@@ -153,74 +145,68 @@ public class TestRowSimilarityJob extend
   }
 
   /**
-   * @tests {@link SimilarityReducer}
-   *
-   * @throws Exception
+   * Tests {@link SimilarityReducer}
    */
   public void testSimilarityReducer() throws Exception {
 
     Reducer<WeightedRowPair,Cooccurrence,SimilarityMatrixEntryKey,MatrixEntryWritable>.Context context =
       EasyMock.createMock(Reducer.Context.class);
 
-    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(12, 0.5d)),
-        MathHelper.matrixEntryMatches(12, 34, 0.5d));
-    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(34, 0.5d)),
-        MathHelper.matrixEntryMatches(34, 12, 0.5d));
+    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(12, 0.5)),
+        MathHelper.matrixEntryMatches(12, 34, 0.5));
+    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(34, 0.5)),
+        MathHelper.matrixEntryMatches(34, 12, 0.5));
 
     EasyMock.replay(context);
 
     SimilarityReducer reducer = new SimilarityReducer();
     setField(reducer, "similarity", new DistributedTanimotoCoefficientVectorSimilarity());
 
-    reducer.reduce(new WeightedRowPair(12, 34, 3.0d, 3.0d), Arrays.asList(new Cooccurrence(56, 1.0d, 2.0d),
-        new Cooccurrence(78, 3.0d, 6.0d)), context);
+    reducer.reduce(new WeightedRowPair(12, 34, 3.0, 3.0), Arrays.asList(new Cooccurrence(56, 1.0, 2.0),
+        new Cooccurrence(78, 3.0, 6.0)), context);
 
     EasyMock.verify(context);
   }
 
   /**
-   * @tests {@link SimilarityReducer} in the special case of computing the similarity of a row to
+   * Tests {@link SimilarityReducer} in the special case of computing the similarity of a row to
    * itself
-   *
-   * @throws Exception
    */
   public void testSimilarityReducerSelfSimilarity() throws Exception {
 
     Reducer<WeightedRowPair,Cooccurrence,SimilarityMatrixEntryKey,MatrixEntryWritable>.Context context =
       EasyMock.createMock(Reducer.Context.class);
 
-    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(90, 1.0d)), MathHelper.matrixEntryMatches(90, 90, 1.0d));
+    context.write(EasyMock.eq(new SimilarityMatrixEntryKey(90, 1.0)), MathHelper.matrixEntryMatches(90, 90, 1.0));
 
     EasyMock.replay(context);
 
     SimilarityReducer reducer = new SimilarityReducer();
     setField(reducer, "similarity", new DistributedTanimotoCoefficientVectorSimilarity());
 
-    reducer.reduce(new WeightedRowPair(90, 90, 2.0d, 2.0d), Arrays.asList(new Cooccurrence(56, 1.0d, 2.0d),
-        new Cooccurrence(78, 3.0d, 6.0d)), context);
+    reducer.reduce(new WeightedRowPair(90, 90, 2.0, 2.0), Arrays.asList(new Cooccurrence(56, 1.0, 2.0),
+        new Cooccurrence(78, 3.0, 6.0)), context);
 
     EasyMock.verify(context);
   }
 
   /**
-   * @tests {@link EntriesToVectorsReducer}
-   *
-   * @throws Exception
+   * Tests {@link EntriesToVectorsReducer}
    */
   public void testEntriesToVectorsReducer() throws Exception {
     Reducer<SimilarityMatrixEntryKey,MatrixEntryWritable,IntWritable,VectorWritable>.Context context =
       EasyMock.createMock(Reducer.Context.class);
 
-    context.write(EasyMock.eq(new IntWritable(12)), MathHelper.vectorMatches(MathHelper.elem(34, 0.8d)));
+    context.write(EasyMock.eq(new IntWritable(12)), MathHelper.vectorMatches(MathHelper.elem(34, 0.8)));
 
     EasyMock.replay(context);
 
     EntriesToVectorsReducer reducer = new EntriesToVectorsReducer();
     setField(reducer, "maxSimilaritiesPerRow", 1);
 
-    reducer.reduce(new SimilarityMatrixEntryKey(12, 1.0d), Arrays.asList(
-        MathHelper.matrixEntry(12, 34, 0.8d),
-        MathHelper.matrixEntry(12, 56, 0.7d)), context);
+    reducer.reduce(new SimilarityMatrixEntryKey(12, 1.0), Arrays.asList(
+        MathHelper.matrixEntry(12, 34, 0.8),
+        MathHelper.matrixEntry(12, 56, 0.7)), context);
 
     EasyMock.verify(context);
 
@@ -243,8 +229,6 @@ public class TestRowSimilarityJob extend
    * 0.666, 1,     0
    * 0,     0,     1
    * </pre>
-   *
-   * @throws Exception
    */
   public void testSmallSampleMatrix() throws Exception {
 
@@ -280,17 +264,17 @@ public class TestRowSimilarityJob extend
     assertEquals(3, similarityMatrix.numCols());
     assertEquals(3, similarityMatrix.numRows());
 
-    assertEquals(1.0d, similarityMatrix.get(0, 0));
-    assertEquals(1.0d, similarityMatrix.get(1, 1));
-    assertEquals(1.0d, similarityMatrix.get(2, 2));
-
-    assertEquals(0.0d, similarityMatrix.get(2, 0));
-    assertEquals(0.0d, similarityMatrix.get(2, 1));
-    assertEquals(0.0d, similarityMatrix.get(0, 2));
-    assertEquals(0.0d, similarityMatrix.get(1, 2));
+    assertEquals(1.0, similarityMatrix.get(0, 0));
+    assertEquals(1.0, similarityMatrix.get(1, 1));
+    assertEquals(1.0, similarityMatrix.get(2, 2));
+
+    assertEquals(0.0, similarityMatrix.get(2, 0));
+    assertEquals(0.0, similarityMatrix.get(2, 1));
+    assertEquals(0.0, similarityMatrix.get(0, 2));
+    assertEquals(0.0, similarityMatrix.get(1, 2));
 
-    assertEquals(0.6666d, similarityMatrix.get(0, 1), 0.0001);
-    assertEquals(0.6666d, similarityMatrix.get(1, 0), 0.0001);
+    assertEquals(0.6666, similarityMatrix.get(0, 1), 0.0001);
+    assertEquals(0.6666, similarityMatrix.get(1, 0), 0.0001);
   }
 
   /**
@@ -334,8 +318,6 @@ public class TestRowSimilarityJob extend
    * r2 --> r1
    * r3 --> r1
    * </pre>
-   *
-   * @throws Exception
    */
   public void testLimitEntriesInSimilarityMatrix() throws Exception {
 
@@ -372,17 +354,17 @@ public class TestRowSimilarityJob extend
     assertEquals(3, similarityMatrix.numCols());
     assertEquals(3, similarityMatrix.numRows());
 
-    assertEquals(0.0d, similarityMatrix.get(0, 0));
-    assertEquals(0.5d, similarityMatrix.get(0, 1));
-    assertEquals(0.0d, similarityMatrix.get(0, 2));
-
-    assertEquals(0.5d, similarityMatrix.get(1, 0));
-    assertEquals(0.0d, similarityMatrix.get(1, 1));
-    assertEquals(0.0d, similarityMatrix.get(1, 2));
-
-    assertEquals(0.4d, similarityMatrix.get(2, 0));
-    assertEquals(0.0d, similarityMatrix.get(2, 1));
-    assertEquals(0.0d, similarityMatrix.get(2, 2));
+    assertEquals(0.0, similarityMatrix.get(0, 0));
+    assertEquals(0.5, similarityMatrix.get(0, 1));
+    assertEquals(0.0, similarityMatrix.get(0, 2));
+
+    assertEquals(0.5, similarityMatrix.get(1, 0));
+    assertEquals(0.0, similarityMatrix.get(1, 1));
+    assertEquals(0.0, similarityMatrix.get(1, 2));
+
+    assertEquals(0.4, similarityMatrix.get(2, 0));
+    assertEquals(0.0, similarityMatrix.get(2, 1));
+    assertEquals(0.0, similarityMatrix.get(2, 2));
   }
 
 }

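The 0.666 entries asserted in testSmallSampleMatrix() are simply the Tanimoto (Jaccard) coefficient the job is configured with: the number of columns two rows share, divided by the number of columns in their union, e.g. 2 shared columns out of 3 distinct columns gives 2 / (3 + 2 - 2) ≈ 0.666. A standalone sketch of that computation for sparse binary rows (plain Java, not the Mahout implementation):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Illustrative Tanimoto (Jaccard) coefficient over the non-zero column sets
// of two rows, mirroring the values asserted in testSmallSampleMatrix().
public final class TanimotoSketch {

  private TanimotoSketch() {
  }

  static double tanimoto(Set<Integer> a, Set<Integer> b) {
    Set<Integer> intersection = new HashSet<Integer>(a);
    intersection.retainAll(b);
    int union = a.size() + b.size() - intersection.size();
    return union == 0 ? 0.0 : (double) intersection.size() / union;
  }

  public static void main(String[] args) {
    Set<Integer> row0 = new HashSet<Integer>(Arrays.asList(0, 1, 2));
    Set<Integer> row1 = new HashSet<Integer>(Arrays.asList(0, 1));
    System.out.println(tanimoto(row0, row1)); // 0.666...
  }
}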
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedEuclideanDistanceVectorSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedEuclideanDistanceVectorSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedEuclideanDistanceVectorSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedEuclideanDistanceVectorSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -27,11 +27,11 @@ public class DistributedEuclideanDistanc
 
     assertSimilar(new DistributedEuclideanDistanceVectorSimilarity(),
         asVector(3, -2),
-        asVector(3, -2), 2, 1.0d);
+        asVector(3, -2), 2, 1.0);
 
     assertSimilar(new DistributedEuclideanDistanceVectorSimilarity(),
         asVector(3, 3),
-        asVector(3, 3), 2, 1.0d);
+        asVector(3, 3), 2, 1.0);
 
     assertSimilar(new DistributedEuclideanDistanceVectorSimilarity(),
         asVector(1, 2, 3),
@@ -39,6 +39,6 @@ public class DistributedEuclideanDistanc
 
     assertSimilar(new DistributedEuclideanDistanceVectorSimilarity(),
         asVector(1, 0),
-        asVector(0, 1), 2, 0.0d);
+        asVector(0, 1), 2, 0.0);
   }
 }

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedPearsonCorrelationVectorSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedPearsonCorrelationVectorSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedPearsonCorrelationVectorSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedPearsonCorrelationVectorSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -26,7 +26,7 @@ public class DistributedPearsonCorrelati
   public void testPearsonCorrelation() throws Exception {
     assertSimilar(new DistributedPearsonCorrelationVectorSimilarity(),
         asVector(3, -2),
-        asVector(3, -2), 2, 1.0d);
+        asVector(3, -2), 2, 1.0);
 
     assertSimilar(new DistributedPearsonCorrelationVectorSimilarity(),
         asVector(3, 3),

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedTanimotoCoefficientVectorSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedTanimotoCoefficientVectorSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedTanimotoCoefficientVectorSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedTanimotoCoefficientVectorSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -35,6 +35,6 @@ public class DistributedTanimotoCoeffici
 
     assertSimilar(new DistributedTanimotoCoefficientVectorSimilarity(),
         asVector(0, 1),
-        asVector(0, 1), 2, 1.0d);
+        asVector(0, 1), 2, 1.0);
   }
 }

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredCosineVectorSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredCosineVectorSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredCosineVectorSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredCosineVectorSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -27,7 +27,7 @@ public class DistributedUncenteredCosine
 
     assertSimilar(new DistributedUncenteredCosineVectorSimilarity(),
         asVector(0, 0, 0, 0, 1),
-        asVector(0, 1, 1, 1, 1), 5, 1.0d);
+        asVector(0, 1, 1, 1, 1), 5, 1.0);
 
     assertSimilar(new DistributedUncenteredCosineVectorSimilarity(),
         asVector(0, 1),
@@ -35,7 +35,7 @@ public class DistributedUncenteredCosine
 
     assertSimilar(new DistributedUncenteredCosineVectorSimilarity(),
         asVector(1, 0),
-        asVector(1, 0), 2, 1.0d);
+        asVector(1, 0), 2, 1.0);
 
     assertSimilar(new DistributedUncenteredCosineVectorSimilarity(),
         asVector(1, 1, 2),

Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredZeroAssumingCosineVectorSimilarityTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredZeroAssumingCosineVectorSimilarityTest.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredZeroAssumingCosineVectorSimilarityTest.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedUncenteredZeroAssumingCosineVectorSimilarityTest.java Tue Aug 17 17:34:14 2010
@@ -26,7 +26,7 @@ public class DistributedUncenteredZeroAs
   public void testUncenteredZeroAssumingCosine() throws Exception {
     assertSimilar(new DistributedUncenteredZeroAssumingCosineVectorSimilarity(),
         asVector(0, 0, 0, 0, 1),
-        asVector(0, 1, 1, 1, 1), 5, 0.5d);
+        asVector(0, 1, 1, 1, 1), 5, 0.5);
 
     assertSimilar(new DistributedUncenteredZeroAssumingCosineVectorSimilarity(),
         asVector(0, 1),
@@ -34,6 +34,6 @@ public class DistributedUncenteredZeroAs
 
     assertSimilar(new DistributedUncenteredZeroAssumingCosineVectorSimilarity(),
         asVector(1, 0),
-        asVector(1, 0), 2, 1.0d);
+        asVector(1, 0), 2, 1.0);
   }
 }

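The expected value 0.5 in the first assertion above is just the uncentered cosine of the two vectors: the dot product is 1, the norms are 1 and 2, so 1 / (1 * 2) = 0.5. A quick sketch of the formula for dense vectors (plain Java, not the Mahout similarity class):

// Uncentered cosine similarity of two dense vectors, matching the
// hand-computed expectations in the test above (illustrative only).
public final class CosineSketch {

  private CosineSketch() {
  }

  static double cosine(double[] a, double[] b) {
    double dot = 0.0;
    double normA = 0.0;
    double normB = 0.0;
    for (int i = 0; i < a.length; i++) {
      dot += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }
    return dot / (Math.sqrt(normA) * Math.sqrt(normB));
  }

  public static void main(String[] args) {
    System.out.println(cosine(new double[] {0, 0, 0, 0, 1},
                              new double[] {0, 1, 1, 1, 1})); // 0.5
  }
}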
Modified: mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedVectorSimilarityTestCase.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedVectorSimilarityTestCase.java?rev=986405&r1=986404&r2=986405&view=diff
==============================================================================
--- mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedVectorSimilarityTestCase.java (original)
+++ mahout/trunk/core/src/test/java/org/apache/mahout/math/hadoop/similarity/vector/DistributedVectorSimilarityTestCase.java Tue Aug 17 17:34:14 2010
@@ -34,8 +34,6 @@ public abstract class DistributedVectorS
 
   /**
    * convenience method to create a {@link Vector}
-   * @param values
-   * @return
    */
   static Vector asVector(double... values) {
     return new DenseVector(values);
@@ -77,7 +75,7 @@ public abstract class DistributedVectorS
     for (int n = 0; n < numberOfColumns; n++) {
       double valueA = v1.get(n);
       double valueB = v2.get(n);
-      if (valueA != 0.0d && valueB != 0.0d) {
+      if (valueA != 0.0 && valueB != 0.0) {
         cooccurrences.add(new Cooccurrence(n, valueA, valueB));
       }
     }


