commons-commits mailing list archives

From: l..@apache.org
Subject: svn commit: r1569342 [2/3] - in /commons/proper/math/trunk/src: main/java/org/apache/commons/math3/fitting/ main/java/org/apache/commons/math3/fitting/leastsquares/ main/java/org/apache/commons/math3/optim/ test/java/org/apache/commons/math3/fitting/le...
Date: Tue, 18 Feb 2014 14:31:35 GMT
Modified: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerAbstractTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerAbstractTest.java?rev=1569342&r1=1569341&r2=1569342&view=diff
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerAbstractTest.java (original)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerAbstractTest.java Tue Feb 18 14:31:34 2014
@@ -16,682 +16,523 @@
  */
 package org.apache.commons.math3.fitting.leastsquares;
 
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Arrays;
-import org.apache.commons.math3.analysis.MultivariateVectorFunction;
 import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
+import org.apache.commons.math3.analysis.MultivariateVectorFunction;
 import org.apache.commons.math3.exception.ConvergenceException;
 import org.apache.commons.math3.exception.DimensionMismatchException;
-import org.apache.commons.math3.exception.NumberIsTooSmallException;
+import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
 import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
 import org.apache.commons.math3.linear.BlockRealMatrix;
-import org.apache.commons.math3.linear.RealMatrix;
 import org.apache.commons.math3.linear.DiagonalMatrix;
-import org.apache.commons.math3.optim.PointVectorValuePair;
+import org.apache.commons.math3.linear.RealMatrix;
+import org.apache.commons.math3.optim.SimpleVectorValueChecker;
 import org.apache.commons.math3.util.FastMath;
 import org.junit.Assert;
-import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
 
 /**
- * Some of the unit tests are re-implementations of the MINPACK
- * <a href="http://www.netlib.org/minpack/ex/file17">file17</a> and
- * <a href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
- * The redistribution policy for MINPACK is available
- * <a href="http://www.netlib.org/minpack/disclaimer">here</a>.
- *
+ * Some of the unit tests are re-implementations of the MINPACK <a
+ * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
+ * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files. The
+ * redistribution policy for MINPACK is available <a href="http://www.netlib.org/minpack/disclaimer">here</a>.
+ * <p/>
  * <T> Concrete implementation of an optimizer.
  *
  * @version $Id$
  */
-public abstract class AbstractLeastSquaresOptimizerAbstractTest<T extends AbstractLeastSquaresOptimizer<T>> {
+public abstract class AbstractLeastSquaresOptimizerAbstractTest {
+
+    public LeastSquaresBuilder base() {
+        return new LeastSquaresBuilder()
+                .checker(new SimpleVectorValueChecker(1e-6, 1e-6))
+                .maxEvaluations(100)
+                .maxIterations(getMaxIterations());
+    }
+
+    public LeastSquaresBuilder builder(CircleVectorial c) {
+        final double[] weights = new double[c.getN()];
+        Arrays.fill(weights, 1.0);
+        return base()
+                .model(c.getModelFunction())
+                .jacobian(c.getModelFunctionJacobian())
+                .target(new double[c.getN()])
+                .weight(new DiagonalMatrix(weights));
+    }
+
+    public LeastSquaresBuilder builder(StatisticalReferenceDataset dataset) {
+        StatisticalReferenceDataset.LeastSquaresProblem problem
+                = dataset.getLeastSquaresProblem();
+        final double[] weights = new double[dataset.getNumObservations()];
+        Arrays.fill(weights, 1.0);
+        return base()
+                .model(problem.getModelFunction())
+                .jacobian(problem.getModelFunctionJacobian())
+                .target(dataset.getData()[1])
+                .weight(new DiagonalMatrix(weights))
+                .start(dataset.getStartingPoint(0));
+    }
+
+    public void fail(LeastSquaresOptimizer optimizer) {
+        Assert.fail("Expected Exception from: " + optimizer.toString());
+    }
+
     /**
-     * @return a concrete optimizer.
+     * @return the default number of allowed iterations (which will be used when not
+     *         specified otherwise).
      */
-    public abstract T createOptimizer();
+    public abstract int getMaxIterations();
 
     /**
-     * @return the default number of allowed iterations (which will be
-     * used when not specified otherwise).
+     * Test the given optimizer on a suite of sample problems. If you need to disable a
+     * particular test case override it in your subclass. If you want to add more tests
+     * override this method and call super.
      */
-    public abstract int getMaxIterations();
+    public void check(LeastSquaresOptimizer optimizer) throws Exception {
+        testGetIterations(optimizer);
+        testTrivial(optimizer);
+        testQRColumnsPermutation(optimizer);
+        testNoDependency(optimizer);
+        testOneSet(optimizer);
+        testTwoSets(optimizer);
+        testNonInvertible(optimizer);
+        testIllConditioned(optimizer);
+        testMoreEstimatedParametersSimple(optimizer);
+        testMoreEstimatedParametersUnsorted(optimizer);
+        testRedundantEquations(optimizer);
+        testInconsistentEquations(optimizer);
+        testInconsistentSizes1(optimizer);
+        testInconsistentSizes2(optimizer);
+        testCircleFitting(optimizer);
+        testCircleFittingBadInit(optimizer);
+        testCircleFittingGoodInit(optimizer);
+        testKirby2(optimizer);
+        testHahn1(optimizer);
+    }
+
+    public void testGetIterations(LeastSquaresOptimizer optimizer) {
+        LeastSquaresProblem lsp = base()
+                .target(new double[]{1})
+                .weight(new DiagonalMatrix(new double[]{1}))
+                .start(new double[]{3})
+                .model(
+                        new MultivariateVectorFunction() {
+                            public double[] value(double[] point) {
+                                return new double[]{
+                                        FastMath.pow(point[0], 4)
+                                };
+                            }
+                        }
+                )
+                .jacobian(
+                        new MultivariateMatrixFunction() {
+                            public double[][] value(double[] point) {
+                                return new double[][]{
+                                        {0.25 * FastMath.pow(point[0], 3)}
+                                };
+                            }
+                        }
+                )
+                .build();
 
-    @Test
-    public void testShallowCopy() {
-        final int maxEval1 = 12;
-        final int maxIter1 = 23;
-        final double[] target1 = { 3.4 };
-        final double[] weight1 = { 4.5 };
-        final double[] start1 = { 5.6 };
-        final double factor1 = 6.7;
-        final MultivariateVectorFunction model1 = new MultivariateVectorFunction() {
-                public double[] value(double[] point) {
-                    return new double[] {
-                        factor1 * factor1 * point[0]
-                    };
-                }};
-        final MultivariateMatrixFunction jac1 = new MultivariateMatrixFunction() {
-                    public double[][] value(double[] point) {
-                        return new double[][] {
-                            { 2 * factor1 * point[0] }
-                        };
-                    }
-                };
-
-
-        final T optim1 = createOptimizer()
-            .withMaxEvaluations(maxEval1)
-            .withMaxIterations(maxIter1)
-            .withTarget(target1)
-            .withWeight(new DiagonalMatrix(weight1))
-            .withStartPoint(start1)
-            .withModelAndJacobian(model1, jac1);
-
-        final T optim2 = optim1.shallowCopy();
-
-        // Check that all fields have the same values.
-        Assert.assertTrue(optim1.getMaxEvaluations() == optim2.getMaxEvaluations());
-        Assert.assertTrue(optim1.getMaxIterations() == optim2.getMaxIterations());
-        Assert.assertTrue(optim1.getTarget()[0] == optim2.getTarget()[0]);
-        Assert.assertTrue(optim1.getWeight().getEntry(0, 0) == optim2.getWeight().getEntry(0, 0));
-        Assert.assertTrue(optim1.getStart()[0] == optim2.getStart()[0]);
-        Assert.assertTrue(optim1.getModel().value(new double[] {32})[0] == optim2.getModel().value(new double[] {32})[0]);
-        Assert.assertTrue(optim1.getJacobian().value(new double[] {54})[0][0] == optim2.getJacobian().value(new double[] {54})[0][0]);
-
-        // Change "optim2".
-        final int maxEval2 = 122;
-        final int maxIter2 = 232;
-        final double[] target2 = { 3.42 };
-        final double[] weight2 = { 4.52 };
-        final double[] start2 = { 5.62 };
-        final double factor2 = 6.72;
-        final MultivariateVectorFunction model2 = new MultivariateVectorFunction() {
-                public double[] value(double[] point) {
-                    return new double[] {
-                        factor2 * factor2 * point[0]
-                    };
-                }};
-        final MultivariateMatrixFunction jac2 = new MultivariateMatrixFunction() {
-                    public double[][] value(double[] point) {
-                        return new double[][] {
-                            { 2 * factor2 * point[0] }
-                        };
-                    }
-                };
-
-        optim2
-            .withMaxEvaluations(maxEval2)
-            .withMaxIterations(maxIter2)
-            .withTarget(target2)
-            .withWeight(new DiagonalMatrix(weight2))
-            .withStartPoint(start2)
-            .withModelAndJacobian(model2, jac2);
-
-        // Check that all fields now have different values.
-        Assert.assertFalse(optim1.getMaxEvaluations() == optim2.getMaxEvaluations());
-        Assert.assertFalse(optim1.getMaxIterations() == optim2.getMaxIterations());
-        Assert.assertFalse(optim1.getTarget()[0] == optim2.getTarget()[0]);
-        Assert.assertFalse(optim1.getWeight().getEntry(0, 0) == optim2.getWeight().getEntry(0, 0));
-        Assert.assertFalse(optim1.getStart()[0] == optim2.getStart()[0]);
-        Assert.assertFalse(optim1.getModel().value(new double[] {32})[0] == optim2.getModel().value(new double[] {32})[0]);
-        Assert.assertFalse(optim1.getJacobian().value(new double[] {54})[0][0] == optim2.getJacobian().value(new double[] {54})[0][0]);
-    }
-
-    @Test
-    public void testGetIterations() {
-        T optim = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withTarget(new double[] { 1 })
-            .withWeight(new DiagonalMatrix(new double[] { 1 }))
-            .withStartPoint(new double[] { 3 })
-            .withModelAndJacobian(new MultivariateVectorFunction() {
-                    public double[] value(double[] point) {
-                        return new double[] {
-                            FastMath.pow(point[0], 4)
-                        };
-                    }},
-                new MultivariateMatrixFunction() {
-                    public double[][] value(double[] point) {
-                        return new double[][] {
-                            { 0.25 * FastMath.pow(point[0], 3) }
-                        };
-                    }
-                });
+        Optimum optimum = optimizer.optimize(lsp);
 
-        optim.optimize();
-        Assert.assertTrue(optim.getIterations() > 0);
+        //TODO more specific test? could pass with 'return 1;'
+        Assert.assertTrue(optimum.getIterations() > 0);
     }
 
-    @Test
-    public void testTrivial() {
+    public void testTrivial(LeastSquaresOptimizer optimizer) {
         LinearProblem problem
-            = new LinearProblem(new double[][] { { 2 } },
-                                new double[] { 3 });
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1 }))
-            .withStartPoint(new double[] { 0 });
+                = new LinearProblem(new double[][]{{2}},
+                new double[]{3});
+        LeastSquaresProblem ls = problem.getBuilder().build();
 
-        PointVectorValuePair optimum = optimizer.optimize();
+        Optimum optimum = optimizer.optimize(ls);
 
-        Assert.assertEquals(0, optimizer.computeRMS(optimum.getPoint()), 1e-10);
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
         Assert.assertEquals(1.5, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(3.0, optimum.getValue()[0], 1e-10);
+        Assert.assertEquals(3.0, optimum.computeValue()[0], 1e-10);
     }
 
-    @Test
-    public void testQRColumnsPermutation() {
+    public void testQRColumnsPermutation(LeastSquaresOptimizer optimizer) {
         LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, -1 }, { 0, 2 }, { 1, -2 } },
-                                new double[] { 4, 6, 1 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0 });
+                = new LinearProblem(new double[][]{{1, -1}, {0, 2}, {1, -2}},
+                new double[]{4, 6, 1});
 
-        PointVectorValuePair optimum = optimizer.optimize();
+        Optimum optimum = optimizer.optimize(problem.getBuilder().build());
 
-        Assert.assertEquals(0, optimizer.computeRMS(optimum.getPoint()), 1e-10);
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
         Assert.assertEquals(7, optimum.getPoint()[0], 1e-10);
         Assert.assertEquals(3, optimum.getPoint()[1], 1e-10);
-        Assert.assertEquals(4, optimum.getValue()[0], 1e-10);
-        Assert.assertEquals(6, optimum.getValue()[1], 1e-10);
-        Assert.assertEquals(1, optimum.getValue()[2], 1e-10);
-    }
-
-    @Test
-    public void testNoDependency() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 2, 0, 0, 0, 0, 0 },
-                { 0, 2, 0, 0, 0, 0 },
-                { 0, 0, 2, 0, 0, 0 },
-                { 0, 0, 0, 2, 0, 0 },
-                { 0, 0, 0, 0, 2, 0 },
-                { 0, 0, 0, 0, 0, 2 }
-        }, new double[] { 0, 1.1, 2.2, 3.3, 4.4, 5.5 });
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0, 0, 0, 0, 0 });
+        Assert.assertEquals(4, optimum.computeValue()[0], 1e-10);
+        Assert.assertEquals(6, optimum.computeValue()[1], 1e-10);
+        Assert.assertEquals(1, optimum.computeValue()[2], 1e-10);
+    }
+
+    public void testNoDependency(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {2, 0, 0, 0, 0, 0},
+                {0, 2, 0, 0, 0, 0},
+                {0, 0, 2, 0, 0, 0},
+                {0, 0, 0, 2, 0, 0},
+                {0, 0, 0, 0, 2, 0},
+                {0, 0, 0, 0, 0, 2}
+        }, new double[]{0, 1.1, 2.2, 3.3, 4.4, 5.5});
 
-        double[] optimum = optimizer.optimize().getPoint();
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
+        Optimum optimum = optimizer.optimize(problem.getBuilder().build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
         for (int i = 0; i < problem.target.length; ++i) {
-            Assert.assertEquals(0.55 * i, optimum[i], 1e-10);
+            Assert.assertEquals(0.55 * i, optimum.getPoint()[i], 1e-10);
         }
     }
 
-    @Test
-    public void testOneSet() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  1,  0, 0 },
-                { -1,  1, 0 },
-                {  0, -1, 1 }
-        }, new double[] { 1, 1, 1});
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0, 0 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(1, optimum[0], 1e-10);
-        Assert.assertEquals(2, optimum[1], 1e-10);
-        Assert.assertEquals(3, optimum[2], 1e-10);
+    public void testOneSet(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {1, 0, 0},
+                {-1, 1, 0},
+                {0, -1, 1}
+        }, new double[]{1, 1, 1});
+
+        Optimum optimum = optimizer.optimize(problem.getBuilder().build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[0], 1e-10);
+        Assert.assertEquals(2, optimum.getPoint()[1], 1e-10);
+        Assert.assertEquals(3, optimum.getPoint()[2], 1e-10);
     }
 
-    @Test
-    public void testTwoSets() {
+    public void testTwoSets(LeastSquaresOptimizer optimizer) {
         double epsilon = 1e-7;
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  2,  1,   0,  4,       0, 0 },
-                { -4, -2,   3, -7,       0, 0 },
-                {  4,  1,  -2,  8,       0, 0 },
-                {  0, -3, -12, -1,       0, 0 },
-                {  0,  0,   0,  0, epsilon, 1 },
-                {  0,  0,   0,  0,       1, 1 }
-        }, new double[] { 2, -9, 2, 2, 1 + epsilon * epsilon, 2});
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0, 0, 0, 0, 0 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(3, optimum[0], 1e-10);
-        Assert.assertEquals(4, optimum[1], 1e-10);
-        Assert.assertEquals(-1, optimum[2], 1e-10);
-        Assert.assertEquals(-2, optimum[3], 1e-10);
-        Assert.assertEquals(1 + epsilon, optimum[4], 1e-10);
-        Assert.assertEquals(1 - epsilon, optimum[5], 1e-10);
-    }
-
-    @Test(expected=ConvergenceException.class)
-    public void testNonInvertible() throws Exception {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  1, 2, -3 },
-                {  2, 1,  3 },
-                { -3, 0, -9 }
-        }, new double[] { 1, 1, 1 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0, 0 });
-
-        optimizer.optimize();
-    }
-
-    @Test
-    public void testIllConditioned() {
-        LinearProblem problem1 = new LinearProblem(new double[][] {
-                { 10, 7,  8,  7 },
-                {  7, 5,  6,  5 },
-                {  8, 6, 10,  9 },
-                {  7, 5,  9, 10 }
-        }, new double[] { 32, 23, 33, 31 });
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem1.getModelFunction(),
-                                  problem1.getModelFunctionJacobian())
-            .withTarget(problem1.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 1, 2, 3 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(1, optimum[0], 1e-10);
-        Assert.assertEquals(1, optimum[1], 1e-10);
-        Assert.assertEquals(1, optimum[2], 1e-10);
-        Assert.assertEquals(1, optimum[3], 1e-10);
-
-        LinearProblem problem2 = new LinearProblem(new double[][] {
-                { 10.00, 7.00, 8.10, 7.20 },
-                {  7.08, 5.04, 6.00, 5.00 },
-                {  8.00, 5.98, 9.89, 9.00 },
-                {  6.99, 4.99, 9.00, 9.98 }
-        }, new double[] { 32, 23, 33, 31 });
-
-        optimizer = optimizer
-            .withModelAndJacobian(problem2.getModelFunction(),
-                                  problem2.getModelFunctionJacobian())
-            .withTarget(problem2.getTarget());
-
-        optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(-81, optimum[0], 1e-8);
-        Assert.assertEquals(137, optimum[1], 1e-8);
-        Assert.assertEquals(-34, optimum[2], 1e-8);
-        Assert.assertEquals( 22, optimum[3], 1e-8);
-    }
-
-    @Test
-    public void testMoreEstimatedParametersSimple() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 3, 2,  0, 0 },
-                { 0, 1, -1, 1 },
-                { 2, 0,  1, 0 }
-        }, new double[] { 7, 3, 5 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 7, 6, 5, 4 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-    }
-
-    @Test
-    public void testMoreEstimatedParametersUnsorted() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1, 1,  0,  0, 0,  0 },
-                { 0, 0,  1,  1, 1,  0 },
-                { 0, 0,  0,  0, 1, -1 },
-                { 0, 0, -1,  1, 0,  1 },
-                { 0, 0,  0, -1, 1,  0 }
-       }, new double[] { 3, 12, -1, 7, 1 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 2, 2, 2, 2, 2, 2 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(3, optimum[2], 1e-10);
-        Assert.assertEquals(4, optimum[3], 1e-10);
-        Assert.assertEquals(5, optimum[4], 1e-10);
-        Assert.assertEquals(6, optimum[5], 1e-10);
-    }
-
-    @Test
-    public void testRedundantEquations() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1,  1 },
-                { 1, -1 },
-                { 1,  3 }
-        }, new double[] { 3, 1, 5 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 1, 1 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(2, optimum[0], 1e-10);
-        Assert.assertEquals(1, optimum[1], 1e-10);
-    }
-
-    @Test
-    public void testInconsistentEquations() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1,  1 },
-                { 1, -1 },
-                { 1,  3 }
-        }, new double[] { 3, 1, 4 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 1, 1 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertTrue(optimizer.computeRMS(optimum) > 0.1);
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {2, 1, 0, 4, 0, 0},
+                {-4, -2, 3, -7, 0, 0},
+                {4, 1, -2, 8, 0, 0},
+                {0, -3, -12, -1, 0, 0},
+                {0, 0, 0, 0, epsilon, 1},
+                {0, 0, 0, 0, 1, 1}
+        }, new double[]{2, -9, 2, 2, 1 + epsilon * epsilon, 2});
+
+        Optimum optimum = optimizer.optimize(problem.getBuilder().build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(3, optimum.getPoint()[0], 1e-10);
+        Assert.assertEquals(4, optimum.getPoint()[1], 1e-10);
+        Assert.assertEquals(-1, optimum.getPoint()[2], 1e-10);
+        Assert.assertEquals(-2, optimum.getPoint()[3], 1e-10);
+        Assert.assertEquals(1 + epsilon, optimum.getPoint()[4], 1e-10);
+        Assert.assertEquals(1 - epsilon, optimum.getPoint()[5], 1e-10);
+    }
+
+    public void testNonInvertible(LeastSquaresOptimizer optimizer) throws Exception {
+        try {
+            LinearProblem problem = new LinearProblem(new double[][]{
+                    {1, 2, -3},
+                    {2, 1, 3},
+                    {-3, 0, -9}
+            }, new double[]{1, 1, 1});
+
+            optimizer.optimize(problem.getBuilder().build());
+
+            fail(optimizer);
+        } catch (ConvergenceException e) {
+            //expected
+        }
     }
 
-    @Test(expected=DimensionMismatchException.class)
-    public void testInconsistentSizes1() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, 0 },
-                                                 { 0, 1 } },
-                                new double[] { -1, 1 });
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1 }))
-            .withStartPoint(new double[] { 0, 0 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(-1, optimum[0], 1e-10);
-        Assert.assertEquals(1, optimum[1], 1e-10);
-
-        optimizer.withWeight(new DiagonalMatrix(new double[] { 1 })).optimize();
+    public void testIllConditioned(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem1 = new LinearProblem(new double[][]{
+                {10, 7, 8, 7},
+                {7, 5, 6, 5},
+                {8, 6, 10, 9},
+                {7, 5, 9, 10}
+        }, new double[]{32, 23, 33, 31});
+        final double[] start = {0, 1, 2, 3};
+
+        Optimum optimum = optimizer
+                .optimize(problem1.getBuilder().start(start).build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[0], 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[2], 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[3], 1e-10);
+
+        LinearProblem problem2 = new LinearProblem(new double[][]{
+                {10.00, 7.00, 8.10, 7.20},
+                {7.08, 5.04, 6.00, 5.00},
+                {8.00, 5.98, 9.89, 9.00},
+                {6.99, 4.99, 9.00, 9.98}
+        }, new double[]{32, 23, 33, 31});
+
+        optimum = optimizer.optimize(problem2.getBuilder().start(start).build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(-81, optimum.getPoint()[0], 1e-8);
+        Assert.assertEquals(137, optimum.getPoint()[1], 1e-8);
+        Assert.assertEquals(-34, optimum.getPoint()[2], 1e-8);
+        Assert.assertEquals(22, optimum.getPoint()[3], 1e-8);
+    }
+
+    public void testMoreEstimatedParametersSimple(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {3, 2, 0, 0},
+                {0, 1, -1, 1},
+                {2, 0, 1, 0}
+        }, new double[]{7, 3, 5});
+
+        Optimum optimum = optimizer
+                .optimize(problem.getBuilder().start(new double[]{7, 6, 5, 4}).build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+    }
+
+    public void testMoreEstimatedParametersUnsorted(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {1, 1, 0, 0, 0, 0},
+                {0, 0, 1, 1, 1, 0},
+                {0, 0, 0, 0, 1, -1},
+                {0, 0, -1, 1, 0, 1},
+                {0, 0, 0, -1, 1, 0}
+        }, new double[]{3, 12, -1, 7, 1});
+
+        Optimum optimum = optimizer.optimize(
+                problem.getBuilder().start(new double[]{2, 2, 2, 2, 2, 2}).build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(3, optimum.getPoint()[2], 1e-10);
+        Assert.assertEquals(4, optimum.getPoint()[3], 1e-10);
+        Assert.assertEquals(5, optimum.getPoint()[4], 1e-10);
+        Assert.assertEquals(6, optimum.getPoint()[5], 1e-10);
+    }
+
+    public void testRedundantEquations(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {1, 1},
+                {1, -1},
+                {1, 3}
+        }, new double[]{3, 1, 5});
+
+        Optimum optimum = optimizer
+                .optimize(problem.getBuilder().start(new double[]{1, 1}).build());
+
+        Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+        Assert.assertEquals(2, optimum.getPoint()[0], 1e-10);
+        Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
+    }
+
+    public void testInconsistentEquations(LeastSquaresOptimizer optimizer) {
+        LinearProblem problem = new LinearProblem(new double[][]{
+                {1, 1},
+                {1, -1},
+                {1, 3}
+        }, new double[]{3, 1, 4});
+
+        Optimum optimum = optimizer
+                .optimize(problem.getBuilder().start(new double[]{1, 1}).build());
+
+        //TODO what is this actually testing?
+        Assert.assertTrue(optimum.computeRMS() > 0.1);
+    }
+
+    public void testInconsistentSizes1(LeastSquaresOptimizer optimizer) {
+        try {
+            LinearProblem problem
+                    = new LinearProblem(new double[][]{{1, 0},
+                    {0, 1}},
+                    new double[]{-1, 1});
+
+            //TODO why is this part here? hasn't it been tested already?
+            Optimum optimum = optimizer.optimize(problem.getBuilder().build());
+
+            Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+            Assert.assertEquals(-1, optimum.getPoint()[0], 1e-10);
+            Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
+
+            //TODO move to builder test
+            optimizer.optimize(
+                    problem.getBuilder().weight(new DiagonalMatrix(new double[]{1})).build());
+
+            fail(optimizer);
+        } catch (DimensionMismatchException e) {
+            //expected
+        }
     }
 
-    @Test(expected=DimensionMismatchException.class)
-    public void testInconsistentSizes2() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, 0 }, { 0, 1 } },
-                                new double[] { -1, 1 });
-
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1 }))
-            .withStartPoint(new double[] { 0, 0 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(0, optimizer.computeRMS(optimum), 1e-10);
-        Assert.assertEquals(-1, optimum[0], 1e-10);
-        Assert.assertEquals(1, optimum[1], 1e-10);
-
-        optimizer
-            .withTarget(new double[] { 1 })
-            .withWeight(new DiagonalMatrix(new double[] { 1 }))
-            .optimize();
+    public void testInconsistentSizes2(LeastSquaresOptimizer optimizer) {
+        try {
+            LinearProblem problem
+                    = new LinearProblem(new double[][]{{1, 0}, {0, 1}},
+                    new double[]{-1, 1});
+
+            Optimum optimum = optimizer.optimize(problem.getBuilder().build());
+
+            Assert.assertEquals(0, optimum.computeRMS(), 1e-10);
+            Assert.assertEquals(-1, optimum.getPoint()[0], 1e-10);
+            Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
+
+            //TODO move to builder test
+            optimizer.optimize(
+                    problem.getBuilder()
+                            .target(new double[]{1})
+                            .weight(new DiagonalMatrix(new double[]{1}))
+                            .build()
+            );
+
+            fail(optimizer);
+        } catch (DimensionMismatchException e) {
+            //expected
+        }
     }
 
-    @Test
-    public void testCircleFitting() {
+    public void testCircleFitting(LeastSquaresOptimizer optimizer) {
         CircleVectorial circle = new CircleVectorial();
-        circle.addPoint( 30,  68);
-        circle.addPoint( 50,  -6);
+        circle.addPoint(30, 68);
+        circle.addPoint(50, -6);
         circle.addPoint(110, -20);
-        circle.addPoint( 35,  15);
-        circle.addPoint( 45,  97);
+        circle.addPoint(35, 15);
+        circle.addPoint(45, 97);
+        final double[] start = {98.680, 47.345};
 
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(circle.getModelFunction(),
-                                  circle.getModelFunctionJacobian())
-            .withTarget(new double[] { 0, 0, 0, 0, 0 })
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 98.680, 47.345 });
+        Optimum optimum = optimizer.optimize(builder(circle).start(start).build());
 
-        double[] optimum = optimizer.optimize().getPoint();
-        Assert.assertTrue(optimizer.getEvaluations() < 10);
+        Assert.assertTrue(optimum.getEvaluations() < 10);
 
-        double rms = optimizer.computeRMS(optimum);
-        Assert.assertEquals(1.768262623567235,  FastMath.sqrt(circle.getN()) * rms, 1e-10);
+        double rms = optimum.computeRMS();
+        Assert.assertEquals(1.768262623567235, FastMath.sqrt(circle.getN()) * rms, 1e-10);
 
-        Vector2D center = new Vector2D(optimum[0], optimum[1]);
+        Vector2D center = new Vector2D(optimum.getPoint()[0], optimum.getPoint()[1]);
         Assert.assertEquals(69.96016176931406, circle.getRadius(center), 1e-6);
         Assert.assertEquals(96.07590211815305, center.getX(), 1e-6);
         Assert.assertEquals(48.13516790438953, center.getY(), 1e-6);
 
-        double[][] cov = optimizer.computeCovariances(optimum, 1e-14);
+        double[][] cov = optimum.computeCovariances(1e-14);
         Assert.assertEquals(1.839, cov[0][0], 0.001);
         Assert.assertEquals(0.731, cov[0][1], 0.001);
         Assert.assertEquals(cov[0][1], cov[1][0], 1e-14);
         Assert.assertEquals(0.786, cov[1][1], 0.001);
 
-        // add perfect measurements and check errors are reduced
-        double  r = circle.getRadius(center);
-        for (double d= 0; d < 2 * FastMath.PI; d += 0.01) {
+        // add perfect measurements and check formal errors are reduced
+        double r = circle.getRadius(center);
+        for (double d = 0; d < 2 * FastMath.PI; d += 0.01) {
             circle.addPoint(center.getX() + r * FastMath.cos(d), center.getY() + r * FastMath.sin(d));
         }
 
-        double[] target = new double[circle.getN()];
-        Arrays.fill(target, 0);
         double[] weights = new double[circle.getN()];
         Arrays.fill(weights, 2);
-        optimizer = optimizer.withTarget(target).withWeight(new DiagonalMatrix(weights));
-        optimum = optimizer.optimize().getPoint();
 
-        cov = optimizer.computeCovariances(optimum, 1e-14);
+        optimum = optimizer.optimize(
+                builder(circle).weight(new DiagonalMatrix(weights)).start(start).build());
+
+        cov = optimum.computeCovariances(1e-14);
         Assert.assertEquals(0.0016, cov[0][0], 0.001);
         Assert.assertEquals(3.2e-7, cov[0][1], 1e-9);
         Assert.assertEquals(cov[0][1], cov[1][0], 1e-14);
         Assert.assertEquals(0.0016, cov[1][1], 0.001);
     }
 
-    @Test
-    public void testCircleFittingBadInit() {
+    public void testCircleFittingBadInit(LeastSquaresOptimizer optimizer) {
         CircleVectorial circle = new CircleVectorial();
         double[][] points = circlePoints;
-        double[] target = new double[points.length];
-        Arrays.fill(target, 0);
         double[] weights = new double[points.length];
+        final double[] start = {-12, -12};
         Arrays.fill(weights, 2);
         for (int i = 0; i < points.length; ++i) {
             circle.addPoint(points[i][0], points[i][1]);
         }
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(circle.getModelFunction(),
-                                  circle.getModelFunctionJacobian())
-            .withTarget(target)
-            .withWeight(new DiagonalMatrix(weights))
-            .withStartPoint(new double[] { -12, -12 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Vector2D center = new Vector2D(optimum[0], optimum[1]);
-        Assert.assertTrue(optimizer.getEvaluations() < 25);
-        Assert.assertEquals( 0.043, optimizer.computeRMS(optimum), 1e-3);
-        Assert.assertEquals( 0.292235,  circle.getRadius(center), 1e-6);
-        Assert.assertEquals(-0.151738,  center.getX(), 1e-6);
-        Assert.assertEquals( 0.2075001, center.getY(), 1e-6);
+
+        Optimum optimum = optimizer.optimize(builder(circle).weight(new DiagonalMatrix(weights)).start(start).build());
+
+        Vector2D center = new Vector2D(optimum.getPoint()[0], optimum.getPoint()[1]);
+        Assert.assertTrue(optimum.getEvaluations() < 25);
+        Assert.assertEquals(0.043, optimum.computeRMS(), 1e-3);
+        Assert.assertEquals(0.292235, circle.getRadius(center), 1e-6);
+        Assert.assertEquals(-0.151738, center.getX(), 1e-6);
+        Assert.assertEquals(0.2075001, center.getY(), 1e-6);
     }
 
-    @Test
-    public void testCircleFittingGoodInit() {
+    public void testCircleFittingGoodInit(LeastSquaresOptimizer optimizer) {
         CircleVectorial circle = new CircleVectorial();
         double[][] points = circlePoints;
-        double[] target = new double[points.length];
-        Arrays.fill(target, 0);
         double[] weights = new double[points.length];
         Arrays.fill(weights, 2);
         for (int i = 0; i < points.length; ++i) {
             circle.addPoint(points[i][0], points[i][1]);
         }
-        T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(circle.getModelFunction(),
-                                  circle.getModelFunctionJacobian())
-            .withTarget(target)
-            .withWeight(new DiagonalMatrix(weights))
-            .withStartPoint(new double[] { 0, 0 });
-
-        double[] optimum = optimizer.optimize().getPoint();
-
-        Assert.assertEquals(-0.1517383071957963, optimum[0], 1e-6);
-        Assert.assertEquals(0.2074999736353867,  optimum[1], 1e-6);
-        Assert.assertEquals(0.04268731682389561, optimizer.computeRMS(optimum), 1e-8);
-    }
-
-    private final double[][] circlePoints = new double[][] {
-        {-0.312967,  0.072366}, {-0.339248,  0.132965}, {-0.379780,  0.202724},
-        {-0.390426,  0.260487}, {-0.361212,  0.328325}, {-0.346039,  0.392619},
-        {-0.280579,  0.444306}, {-0.216035,  0.470009}, {-0.149127,  0.493832},
-        {-0.075133,  0.483271}, {-0.007759,  0.452680}, { 0.060071,  0.410235},
-        { 0.103037,  0.341076}, { 0.118438,  0.273884}, { 0.131293,  0.192201},
-        { 0.115869,  0.129797}, { 0.072223,  0.058396}, { 0.022884,  0.000718},
-        {-0.053355, -0.020405}, {-0.123584, -0.032451}, {-0.216248, -0.032862},
-        {-0.278592, -0.005008}, {-0.337655,  0.056658}, {-0.385899,  0.112526},
-        {-0.405517,  0.186957}, {-0.415374,  0.262071}, {-0.387482,  0.343398},
-        {-0.347322,  0.397943}, {-0.287623,  0.458425}, {-0.223502,  0.475513},
-        {-0.135352,  0.478186}, {-0.061221,  0.483371}, { 0.003711,  0.422737},
-        { 0.065054,  0.375830}, { 0.108108,  0.297099}, { 0.123882,  0.222850},
-        { 0.117729,  0.134382}, { 0.085195,  0.056820}, { 0.029800, -0.019138},
-        {-0.027520, -0.072374}, {-0.102268, -0.091555}, {-0.200299, -0.106578},
-        {-0.292731, -0.091473}, {-0.356288, -0.051108}, {-0.420561,  0.014926},
-        {-0.471036,  0.074716}, {-0.488638,  0.182508}, {-0.485990,  0.254068},
-        {-0.463943,  0.338438}, {-0.406453,  0.404704}, {-0.334287,  0.466119},
-        {-0.254244,  0.503188}, {-0.161548,  0.495769}, {-0.075733,  0.495560},
-        { 0.001375,  0.434937}, { 0.082787,  0.385806}, { 0.115490,  0.323807},
-        { 0.141089,  0.223450}, { 0.138693,  0.131703}, { 0.126415,  0.049174},
-        { 0.066518, -0.010217}, {-0.005184, -0.070647}, {-0.080985, -0.103635},
-        {-0.177377, -0.116887}, {-0.260628, -0.100258}, {-0.335756, -0.056251},
-        {-0.405195, -0.000895}, {-0.444937,  0.085456}, {-0.484357,  0.175597},
-        {-0.472453,  0.248681}, {-0.438580,  0.347463}, {-0.402304,  0.422428},
-        {-0.326777,  0.479438}, {-0.247797,  0.505581}, {-0.152676,  0.519380},
-        {-0.071754,  0.516264}, { 0.015942,  0.472802}, { 0.076608,  0.419077},
-        { 0.127673,  0.330264}, { 0.159951,  0.262150}, { 0.153530,  0.172681},
-        { 0.140653,  0.089229}, { 0.078666,  0.024981}, { 0.023807, -0.037022},
-        {-0.048837, -0.077056}, {-0.127729, -0.075338}, {-0.221271, -0.067526}
+        final double[] start = {0, 0};
+
+        Optimum optimum = optimizer.optimize(
+                builder(circle).weight(new DiagonalMatrix(weights)).start(start).build());
+
+        Assert.assertEquals(-0.1517383071957963, optimum.getPoint()[0], 1e-6);
+        Assert.assertEquals(0.2074999736353867, optimum.getPoint()[1], 1e-6);
+        Assert.assertEquals(0.04268731682389561, optimum.computeRMS(), 1e-8);
+    }
+
+    private final double[][] circlePoints = new double[][]{
+            {-0.312967, 0.072366}, {-0.339248, 0.132965}, {-0.379780, 0.202724},
+            {-0.390426, 0.260487}, {-0.361212, 0.328325}, {-0.346039, 0.392619},
+            {-0.280579, 0.444306}, {-0.216035, 0.470009}, {-0.149127, 0.493832},
+            {-0.075133, 0.483271}, {-0.007759, 0.452680}, {0.060071, 0.410235},
+            {0.103037, 0.341076}, {0.118438, 0.273884}, {0.131293, 0.192201},
+            {0.115869, 0.129797}, {0.072223, 0.058396}, {0.022884, 0.000718},
+            {-0.053355, -0.020405}, {-0.123584, -0.032451}, {-0.216248, -0.032862},
+            {-0.278592, -0.005008}, {-0.337655, 0.056658}, {-0.385899, 0.112526},
+            {-0.405517, 0.186957}, {-0.415374, 0.262071}, {-0.387482, 0.343398},
+            {-0.347322, 0.397943}, {-0.287623, 0.458425}, {-0.223502, 0.475513},
+            {-0.135352, 0.478186}, {-0.061221, 0.483371}, {0.003711, 0.422737},
+            {0.065054, 0.375830}, {0.108108, 0.297099}, {0.123882, 0.222850},
+            {0.117729, 0.134382}, {0.085195, 0.056820}, {0.029800, -0.019138},
+            {-0.027520, -0.072374}, {-0.102268, -0.091555}, {-0.200299, -0.106578},
+            {-0.292731, -0.091473}, {-0.356288, -0.051108}, {-0.420561, 0.014926},
+            {-0.471036, 0.074716}, {-0.488638, 0.182508}, {-0.485990, 0.254068},
+            {-0.463943, 0.338438}, {-0.406453, 0.404704}, {-0.334287, 0.466119},
+            {-0.254244, 0.503188}, {-0.161548, 0.495769}, {-0.075733, 0.495560},
+            {0.001375, 0.434937}, {0.082787, 0.385806}, {0.115490, 0.323807},
+            {0.141089, 0.223450}, {0.138693, 0.131703}, {0.126415, 0.049174},
+            {0.066518, -0.010217}, {-0.005184, -0.070647}, {-0.080985, -0.103635},
+            {-0.177377, -0.116887}, {-0.260628, -0.100258}, {-0.335756, -0.056251},
+            {-0.405195, -0.000895}, {-0.444937, 0.085456}, {-0.484357, 0.175597},
+            {-0.472453, 0.248681}, {-0.438580, 0.347463}, {-0.402304, 0.422428},
+            {-0.326777, 0.479438}, {-0.247797, 0.505581}, {-0.152676, 0.519380},
+            {-0.071754, 0.516264}, {0.015942, 0.472802}, {0.076608, 0.419077},
+            {0.127673, 0.330264}, {0.159951, 0.262150}, {0.153530, 0.172681},
+            {0.140653, 0.089229}, {0.078666, 0.024981}, {0.023807, -0.037022},
+            {-0.048837, -0.077056}, {-0.127729, -0.075338}, {-0.221271, -0.067526}
     };
 
     public void doTestStRD(final StatisticalReferenceDataset dataset,
+                           final LeastSquaresOptimizer optimizer,
                            final double errParams,
                            final double errParamsSd) {
-        final double[] w = new double[dataset.getNumObservations()];
-        Arrays.fill(w, 1);
 
-        final double[][] data = dataset.getData();
-        final double[] initial = dataset.getStartingPoint(0);
-        final StatisticalReferenceDataset.LeastSquaresProblem problem = dataset.getLeastSquaresProblem();
-
-        final T optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(getMaxIterations())
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(data[1])
-            .withWeight(new DiagonalMatrix(w))
-            .withStartPoint(initial);
+        final Optimum optimum = optimizer.optimize(builder(dataset).build());
 
-        final double[] actual = optimizer.optimize().getPoint();
+        final double[] actual = optimum.getPoint();
         for (int i = 0; i < actual.length; i++) {
             double expected = dataset.getParameter(i);
             double delta = FastMath.abs(errParams * expected);
             Assert.assertEquals(dataset.getName() + ", param #" + i,
-                                expected, actual[i], delta);
+                    expected, actual[i], delta);
         }
     }
 
-    @Test
-    public void testKirby2() throws IOException {
-        doTestStRD(StatisticalReferenceDatasetFactory.createKirby2(), 1E-7, 1E-7);
+    public void testKirby2(LeastSquaresOptimizer optimizer) throws IOException {
+        doTestStRD(StatisticalReferenceDatasetFactory.createKirby2(), optimizer, 1E-7, 1E-7);
     }
 
-    @Test
-    public void testHahn1() throws IOException {
-        doTestStRD(StatisticalReferenceDatasetFactory.createHahn1(), 1E-7, 1E-4);
+    public void testHahn1(LeastSquaresOptimizer optimizer) throws IOException {
+        doTestStRD(StatisticalReferenceDatasetFactory.createHahn1(), optimizer, 1E-7, 1E-4);
     }
 
-    static class LinearProblem {
+    class LinearProblem {
         private final RealMatrix factors;
         private final double[] target;
 
         public LinearProblem(double[][] factors, double[] target) {
             this.factors = new BlockRealMatrix(factors);
-            this.target  = target;
+            this.target = target;
         }
 
         public double[] getTarget() {
@@ -713,5 +554,16 @@ public abstract class AbstractLeastSquar
                 }
             };
         }
+
+        public LeastSquaresBuilder getBuilder() {
+            final double[] weights = new double[target.length];
+            Arrays.fill(weights, 1.0);
+            return base()
+                    .model(getModelFunction())
+                    .jacobian(getModelFunctionJacobian())
+                    .target(target)
+                    .weight(new DiagonalMatrix(weights))
+                    .start(new double[factors.getColumnDimension()]);
+        }
     }
 }

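For readers skimming this hunk: the refactored tests drive any LeastSquaresOptimizer through an immutable LeastSquaresProblem built with LeastSquaresBuilder and read the result back from an Optimum, instead of configuring the optimizer itself with withXxx(...) setters. A minimal sketch of that call pattern, distilled from testTrivial above; the class name and the y = 2x model are illustrative only, not part of the commit:

    package org.apache.commons.math3.fitting.leastsquares;

    import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
    import org.apache.commons.math3.analysis.MultivariateVectorFunction;
    import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
    import org.apache.commons.math3.linear.DiagonalMatrix;
    import org.apache.commons.math3.optim.SimpleVectorValueChecker;

    public class BuilderUsageSketch {
        /** Fit the linear model f(x) = 2x to a single observation y = 3. */
        public static Optimum solve(LeastSquaresOptimizer optimizer) {
            LeastSquaresProblem problem = new LeastSquaresBuilder()
                    .model(new MultivariateVectorFunction() {
                        public double[] value(double[] p) {
                            return new double[]{2 * p[0]};       // model value
                        }
                    })
                    .jacobian(new MultivariateMatrixFunction() {
                        public double[][] value(double[] p) {
                            return new double[][]{{2}};          // d f / d x
                        }
                    })
                    .target(new double[]{3})                     // observed value
                    .weight(new DiagonalMatrix(new double[]{1}))
                    .start(new double[]{0})
                    .checker(new SimpleVectorValueChecker(1e-6, 1e-6))
                    .maxEvaluations(100)
                    .maxIterations(100)
                    .build();
            // the optimizer now only sees an immutable problem description
            return optimizer.optimize(problem);                  // expect x close to 1.5
        }
    }
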
Copied: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java (from r1568754, commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTest.java)
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java?p2=commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java&p1=commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTest.java&r1=1568754&r2=1569342&rev=1569342&view=diff
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTest.java (original)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java Tue Feb 18 14:31:34 2014
@@ -13,40 +13,52 @@
  */
 package org.apache.commons.math3.fitting.leastsquares;
 
-import java.io.IOException;
-import java.util.Arrays;
-import org.apache.commons.math3.optim.PointVectorValuePair;
+import org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem.Evaluation;
 import org.apache.commons.math3.linear.DiagonalMatrix;
 import org.apache.commons.math3.util.FastMath;
-import org.junit.Test;
 import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
 /**
  * The only features tested here are utility methods defined
- * in {@link AbstractLeastSquaresOptimizer} that compute the
+ * in {@link LeastSquaresProblem.Evaluation} that compute the
  * chi-square and parameters standard-deviations.
  */
-public class AbstractLeastSquaresOptimizerTest {
+public class EvaluationTest {
+
+    /**
+     * Create a {@link LeastSquaresBuilder} from a {@link StatisticalReferenceDataset}.
+     *
+     * @param dataset the source data
+     * @return a builder for further customization.
+     */
+    public LeastSquaresBuilder builder(StatisticalReferenceDataset dataset) {
+        StatisticalReferenceDataset.LeastSquaresProblem problem
+                = dataset.getLeastSquaresProblem();
+        final double[] start = dataset.getParameters();
+        final double[] observed = dataset.getData()[1];
+        final double[] weights = new double[observed.length];
+        Arrays.fill(weights, 1d);
+
+        return new LeastSquaresBuilder()
+                .model(problem.getModelFunction())
+                .jacobian(problem.getModelFunctionJacobian())
+                .target(observed)
+                .weight(new DiagonalMatrix(weights))
+                .start(start);
+    }
+
     @Test
     public void testComputeCost() throws IOException {
         final StatisticalReferenceDataset dataset
             = StatisticalReferenceDatasetFactory.createKirby2();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1d);
-
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
 
-        final LevenbergMarquardtOptimizer optim = LevenbergMarquardtOptimizer.create()
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(y)
-            .withWeight(new DiagonalMatrix(w))
-            .withStartPoint(a);
+        final LeastSquaresProblem lsp = builder(dataset).build();
 
         final double expected = dataset.getResidualSumOfSquares();
-        final double cost = optim.computeCost(optim.computeResiduals(optim.getModel().value(optim.getStart())));
+        final double cost = lsp.evaluate(lsp.getStart()).computeCost();
         final double actual = cost * cost;
         Assert.assertEquals(dataset.getName(), expected, actual, 1e-11 * expected);
     }
@@ -55,24 +67,12 @@ public class AbstractLeastSquaresOptimiz
     public void testComputeRMS() throws IOException {
         final StatisticalReferenceDataset dataset
             = StatisticalReferenceDatasetFactory.createKirby2();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1d);
 
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
-
-        final LevenbergMarquardtOptimizer optim = LevenbergMarquardtOptimizer.create()
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(y)
-            .withWeight(new DiagonalMatrix(w))
-            .withStartPoint(a);
+        final LeastSquaresProblem lsp = builder(dataset).build();
 
         final double expected = FastMath.sqrt(dataset.getResidualSumOfSquares() /
                                               dataset.getNumObservations());
-        final double actual = optim.computeRMS(optim.getStart());
+        final double actual = lsp.evaluate(lsp.getStart()).computeRMS();
         Assert.assertEquals(dataset.getName(), expected, actual, 1e-11 * expected);
     }
 
@@ -80,26 +80,15 @@ public class AbstractLeastSquaresOptimiz
     public void testComputeSigma() throws IOException {
         final StatisticalReferenceDataset dataset
             = StatisticalReferenceDatasetFactory.createKirby2();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1d);
-
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
 
-        final LevenbergMarquardtOptimizer optim = LevenbergMarquardtOptimizer.create()
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(y)
-            .withWeight(new DiagonalMatrix(w))
-            .withStartPoint(a);
+        final LeastSquaresProblem lsp = builder(dataset).build();
 
         final double[] expected = dataset.getParametersStandardDeviations();
 
-        final double cost = optim.computeCost(optim.computeResiduals(optim.getModel().value(optim.getStart())));
-        final double[] sig = optim.computeSigma(optim.getStart(), 1e-14);
-        final int dof = y.length - a.length;
+        final Evaluation evaluation = lsp.evaluate(lsp.getStart());
+        final double cost = evaluation.computeCost();
+        final double[] sig = evaluation.computeSigma(1e-14);
+        final int dof = lsp.getObservationSize() - lsp.getParameterSize();
         for (int i = 0; i < sig.length; i++) {
             final double actual = FastMath.sqrt(cost * cost / dof) * sig[i];
             Assert.assertEquals(dataset.getName() + ", parameter #" + i,

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTest.java
------------------------------------------------------------------------------
    svn:keywords = "Author Date Id Revision"

Copied: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java (from r1568754, commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTestValidation.java)
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java?p2=commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java&p1=commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTestValidation.java&r1=1568754&r2=1569342&rev=1569342&view=diff
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/AbstractLeastSquaresOptimizerTestValidation.java (original)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java Tue Feb 18 14:31:34 2014
@@ -17,7 +17,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.ArrayList;
 import java.awt.geom.Point2D;
-import org.apache.commons.math3.optim.PointVectorValuePair;
+
 import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.commons.math3.stat.descriptive.StatisticalSummary;
 import org.apache.commons.math3.linear.DiagonalMatrix;
@@ -27,7 +27,7 @@ import org.junit.Assert;
 
 /**
  * This class demonstrates the main functionality of the
- * {@link AbstractLeastSquaresOptimizer}, common to the
+ * {@link LeastSquaresProblem.Evaluation}, common to the
  * optimizer implementations in package
  * {@link org.apache.commons.math3.fitting.leastsquares}.
  * <br/>
@@ -35,14 +35,14 @@ import org.junit.Assert;
  * <br/>
  * Invoke by running
  * <pre><code>
- *  mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation
+ *  mvn test -Dtest=EvaluationTestValidation
  * </code></pre>
  * or by running
  * <pre><code>
- *  mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation -DargLine="-DmcRuns=1234 -server"
+ *  mvn test -Dtest=EvaluationTestValidation -DargLine="-DmcRuns=1234 -server"
  * </code></pre>
  */
-public class AbstractLeastSquaresOptimizerTestValidation {
+public class EvaluationTestValidation {
     /** Number of runs. */
     private static final int MONTE_CARLO_RUNS = Integer.parseInt(System.getProperty("mcRuns",
                                                                                     "100"));
@@ -115,14 +115,9 @@ public class AbstractLeastSquaresOptimiz
 
             // Estimation of the standard deviation (diagonal elements of the
             // covariance matrix).
-            // Dummy optimizer (to compute the covariance matrix).
-            final AbstractLeastSquaresOptimizer optim = LevenbergMarquardtOptimizer.create()
-                .withModelAndJacobian(problem.getModelFunction(),
-                                      problem.getModelFunctionJacobian())
-                .withTarget(problem.target())
-                .withWeight(new DiagonalMatrix(problem.weight()));
+            final LeastSquaresProblem lsp = builder(problem).build();
 
-            final double[] sigma = optim.computeSigma(init, 1e-14);
+            final double[] sigma = lsp.evaluate(init).computeSigma(1e-14);
 
             // Accumulate statistics.
             for (int i = 0; i < numParams; i++) {
@@ -220,17 +215,13 @@ public class AbstractLeastSquaresOptimiz
         final double[] regress = problem.solve();
 
         // Dummy optimizer (to compute the chi-square).
-        final AbstractLeastSquaresOptimizer optim = LevenbergMarquardtOptimizer.create()
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.target())
-            .withWeight(new DiagonalMatrix(problem.weight()));
+        final LeastSquaresProblem lsp = builder(problem).build();
 
         final double[] init = { slope, offset };
         // Get chi-square of the best parameters set for the given set of
         // observations.
-        final double bestChi2N = getChi2N(optim, problem, regress);
-        final double[] sigma = optim.computeSigma(regress, 1e-14);
+        final double bestChi2N = getChi2N(lsp, regress);
+        final double[] sigma = lsp.evaluate(regress).computeSigma(1e-14);
 
         // Monte-Carlo (generates a grid of parameters).
         final int mcRepeat = MONTE_CARLO_RUNS;
@@ -252,7 +243,7 @@ public class AbstractLeastSquaresOptimiz
             final double s = minSlope + i * deltaSlope;
             for (int j = 0; j < gridSize; j++) {
                 final double o = minOffset + j * deltaOffset;
-                final double chi2N = getChi2N(optim, problem, new double[] {s, o});
+                final double chi2N = getChi2N(lsp, new double[] {s, o});
 
                 paramsAndChi2.add(new double[] {s, o, chi2N});
             }
@@ -293,16 +284,20 @@ public class AbstractLeastSquaresOptimiz
         System.out.println("# " + numLarger + " sets filtered out");
     }
 
+    LeastSquaresBuilder builder(StraightLineProblem problem) {
+        return new LeastSquaresBuilder()
+                .model(problem.getModelFunction())
+                .jacobian(problem.getModelFunctionJacobian())
+                .target(problem.target())
+                .weight(new DiagonalMatrix(problem.weight()));
+    }
     /**
      * @return the normalized chi-square.
      */
-    private double getChi2N(AbstractLeastSquaresOptimizer optim,
-                            StraightLineProblem problem,
+    private double getChi2N(LeastSquaresProblem lsp,
                             double[] params) {
-        final double[] t = problem.target();
-        final double[] w = problem.weight();
-        final double cost = optim.computeCost(optim.computeResiduals(optim.getModel().value(params)));
-        return cost * cost / (t.length - params.length);
+        final double cost = lsp.evaluate(params).computeCost();
+        return cost * cost / (lsp.getObservationSize() - params.length);
     }
 }
 

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/EvaluationTestValidation.java
------------------------------------------------------------------------------
    svn:keywords = "Author Date Id Revision"
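
In EvaluationTestValidation above, the normalized chi-square returned by getChi2N is cost * cost / (n - p): cost is Evaluation.computeCost() (the square root of the weighted residual sum of squares), n is the number of observations reported by getObservationSize(), and p is the number of fitted parameters. A short sketch of that computation, assuming 'problem' is a StraightLineProblem instance as used by the validation class; the slope and offset values are illustrative.

    // Assumed imports: LeastSquaresBuilder, LeastSquaresProblem, DiagonalMatrix.
    // Build the problem exactly as the builder(StraightLineProblem) helper above does.
    final LeastSquaresProblem lsp = new LeastSquaresBuilder()
            .model(problem.getModelFunction())
            .jacobian(problem.getModelFunctionJacobian())
            .target(problem.target())
            .weight(new DiagonalMatrix(problem.weight()))
            .build();

    final double[] params = { 1.0, 2.0 };  // illustrative slope and offset
    final double cost = lsp.evaluate(params).computeCost();
    final double chi2N = cost * cost / (lsp.getObservationSize() - params.length);
    final double[] sigma = lsp.evaluate(params).computeSigma(1e-14);
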

Modified: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/GaussNewtonOptimizerTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/GaussNewtonOptimizerTest.java?rev=1569342&r1=1569341&r2=1569342&view=diff
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/GaussNewtonOptimizerTest.java (original)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/GaussNewtonOptimizerTest.java Tue Feb 18 14:31:34 2014
@@ -17,15 +17,14 @@
 
 package org.apache.commons.math3.fitting.leastsquares;
 
-import java.io.IOException;
 import org.apache.commons.math3.exception.ConvergenceException;
 import org.apache.commons.math3.exception.TooManyEvaluationsException;
-import org.apache.commons.math3.exception.MathUnsupportedOperationException;
 import org.apache.commons.math3.optim.SimpleVectorValueChecker;
-import org.apache.commons.math3.linear.DiagonalMatrix;
 import org.junit.Test;
 import org.junit.Assert;
 
+import java.io.IOException;
+
 /**
  * <p>Some of the unit tests are re-implementations of the MINPACK <a
  * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
@@ -36,60 +35,58 @@ import org.junit.Assert;
  * @version $Id$
  */
 public class GaussNewtonOptimizerTest
-    extends AbstractLeastSquaresOptimizerAbstractTest<GaussNewtonOptimizer> {
-    @Override
-    public GaussNewtonOptimizer createOptimizer() {
-        return GaussNewtonOptimizer.create()
-            .withConvergenceChecker(new SimpleVectorValueChecker(1e-6, 1e-6));
-    }
+    extends AbstractLeastSquaresOptimizerAbstractTest {
 
     @Override
     public int getMaxIterations() {
         return 1000;
     }
 
-    @Override
     @Test
-    public void testShallowCopy() {
-        super.testShallowCopy(); // Test copy of parent.
-
-        final boolean useLU1 = false;
-        final GaussNewtonOptimizer optim1 = createOptimizer()
-            .withLU(useLU1);
-
-        final GaussNewtonOptimizer optim2 = optim1.shallowCopy();
-
-        // Check that all fields have the same values.
-        Assert.assertTrue(optim1.getLU() == optim2.getLU());
+    public void testGaussNewtonLU() throws Exception {
+        check(new GaussNewtonOptimizer(true));
+    }
 
-        // Change "optim2".
-        final boolean useLU2 = true;
-        optim2.withLU(useLU2);
+    @Test
+    public void testGaussNewtonQR() throws Exception {
+        check(new GaussNewtonOptimizer(false));
+    }
 
-        // Check that all fields now have different values.
-        Assert.assertFalse(optim1.getLU() == optim2.getLU());
+    @Override
+    public void check(LeastSquaresOptimizer optimizer) throws Exception {
+        super.check(optimizer);
+        // Add an additional test.
+        testMaxEvaluations(optimizer);
     }
 
     @Override
-    @Test(expected=ConvergenceException.class)
-    public void testMoreEstimatedParametersSimple() {
+    public void testMoreEstimatedParametersSimple(LeastSquaresOptimizer optimizer) {
         /*
          * Exception is expected with this optimizer
          */
-        super.testMoreEstimatedParametersSimple();
+        try {
+            super.testMoreEstimatedParametersSimple(optimizer);
+            fail(optimizer);
+        } catch (ConvergenceException e) {
+            // expected
+        }
     }
 
     @Override
-    @Test(expected=ConvergenceException.class)
-    public void testMoreEstimatedParametersUnsorted() {
+    public void testMoreEstimatedParametersUnsorted(LeastSquaresOptimizer optimizer) {
         /*
          * Exception is expected with this optimizer
          */
-        super.testMoreEstimatedParametersUnsorted();
+        try {
+            super.testMoreEstimatedParametersUnsorted(optimizer);
+            fail(optimizer);
+        } catch (ConvergenceException e) {
+            // expected
+        }
     }
 
-    @Test(expected=TooManyEvaluationsException.class)
-    public void testMaxEvaluations() throws Exception {
+    public void testMaxEvaluations(LeastSquaresOptimizer optimizer) throws Exception {
+        try {
         CircleVectorial circle = new CircleVectorial();
         circle.addPoint( 30.0,  68.0);
         circle.addPoint( 50.0,  -6.0);
@@ -97,36 +94,48 @@ public class GaussNewtonOptimizerTest
         circle.addPoint( 35.0,  15.0);
         circle.addPoint( 45.0,  97.0);
 
-        GaussNewtonOptimizer optimizer = createOptimizer()
-            .withConvergenceChecker(new SimpleVectorValueChecker(1e-30, 1e-30))
-            .withMaxIterations(Integer.MAX_VALUE)
-            .withMaxEvaluations(100)
-            .withModelAndJacobian(circle.getModelFunction(),
-                                  circle.getModelFunctionJacobian())
-            .withTarget(new double[] { 0, 0, 0, 0, 0 })
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1 }))
-            .withStartPoint(new double[] { 98.680, 47.345 });
+        LeastSquaresProblem lsp = builder(circle)
+                .checker(new SimpleVectorValueChecker(1e-30, 1e-30))
+                .maxIterations(Integer.MAX_VALUE)
+                .start(new double[]{98.680, 47.345})
+                .build();
+
+        optimizer.optimize(lsp);
 
-        optimizer.optimize();
+            fail(optimizer);
+        } catch (TooManyEvaluationsException e) {
+            // expected
+        }
     }
 
     @Override
-    @Test(expected=ConvergenceException.class)
-    public void testCircleFittingBadInit() {
+    public void testCircleFittingBadInit(LeastSquaresOptimizer optimizer) {
         /*
          * This test does not converge with this optimizer.
          */
-        super.testCircleFittingBadInit();
+        try {
+            super.testCircleFittingBadInit(optimizer);
+            fail(optimizer);
+        } catch (ConvergenceException e) {
+            // expected
+        }
     }
 
     @Override
-    @Test(expected=ConvergenceException.class)
-    public void testHahn1()
+    public void testHahn1(LeastSquaresOptimizer optimizer)
         throws IOException {
         /*
          * TODO This test leads to a singular problem with the Gauss-Newton
          * optimizer. This should be inquired.
          */
-        super.testHahn1();
+        try {
+            super.testHahn1(optimizer);
+            fail(optimizer);
+        } catch (ConvergenceException e) {
+            // expected for LU
+        } catch (TooManyEvaluationsException e) {
+            // expected for QR
+        }
     }
+
 }
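
With this change GaussNewtonOptimizer takes its decomposition choice in the constructor (true for the LU-based solver, false for the QR-based one, as exercised by testGaussNewtonLU and testGaussNewtonQR above) and receives the problem as an argument to optimize(...) rather than holding model, target and start point itself. A minimal sketch, assuming 'lsp' is a LeastSquaresProblem built as in the abstract test and that Optimum/LeastSquaresOptimizer are imported as in the test classes:

    // Decomposition is fixed at construction time; the same instance can be reused for any problem.
    final LeastSquaresOptimizer gaussNewtonLU = new GaussNewtonOptimizer(true);
    final LeastSquaresOptimizer gaussNewtonQR = new GaussNewtonOptimizer(false);

    final Optimum optimum = gaussNewtonQR.optimize(lsp);  // 'lsp' is assumed, not defined here
    final double[] solution = optimum.getPoint();
    final double rms = optimum.computeRMS();

Because the test cases are now parameterized by the optimizer instance, the expected-failure cases above (testMoreEstimatedParametersSimple, testCircleFittingBadInit, testHahn1) wrap the shared test body in try/catch blocks instead of @Test(expected = ...); testHahn1 in particular expects a different exception for the LU and QR configurations.
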

Modified: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/LevenbergMarquardtOptimizerTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/LevenbergMarquardtOptimizerTest.java?rev=1569342&r1=1569341&r2=1569342&view=diff
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/LevenbergMarquardtOptimizerTest.java (original)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/LevenbergMarquardtOptimizerTest.java Tue Feb 18 14:31:34 2014
@@ -17,24 +17,21 @@
 
 package org.apache.commons.math3.fitting.leastsquares;
 
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.commons.math3.optim.PointVectorValuePair;
-import org.apache.commons.math3.analysis.MultivariateVectorFunction;
 import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
-import org.apache.commons.math3.exception.ConvergenceException;
+import org.apache.commons.math3.analysis.MultivariateVectorFunction;
 import org.apache.commons.math3.exception.DimensionMismatchException;
 import org.apache.commons.math3.exception.TooManyEvaluationsException;
-import org.apache.commons.math3.exception.MathUnsupportedOperationException;
+import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
 import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
-import org.apache.commons.math3.linear.SingularMatrixException;
 import org.apache.commons.math3.linear.DiagonalMatrix;
+import org.apache.commons.math3.linear.SingularMatrixException;
 import org.apache.commons.math3.util.FastMath;
 import org.apache.commons.math3.util.Precision;
 import org.junit.Assert;
 import org.junit.Test;
-import org.junit.Ignore;
+
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * <p>Some of the unit tests are re-implementations of the MINPACK <a
@@ -46,10 +43,20 @@ import org.junit.Ignore;
  * @version $Id$
  */
 public class LevenbergMarquardtOptimizerTest
-    extends AbstractLeastSquaresOptimizerAbstractTest<LevenbergMarquardtOptimizer> {
-    @Override
-    public LevenbergMarquardtOptimizer createOptimizer() {
-        return LevenbergMarquardtOptimizer.create();
+    extends AbstractLeastSquaresOptimizerAbstractTest {
+
+    public LeastSquaresBuilder builder(BevingtonProblem problem) {
+        return base()
+                .model(problem.getModelFunction())
+                .jacobian(problem.getModelFunctionJacobian());
+    }
+
+    public LeastSquaresBuilder builder(CircleProblem problem) {
+        return base()
+                .model(problem.getModelFunction())
+                .jacobian(problem.getModelFunctionJacobian())
+                .target(problem.target())
+                .weight(new DiagonalMatrix(problem.weight()));
     }
 
     @Override
@@ -57,79 +64,44 @@ public class LevenbergMarquardtOptimizer
         return 25;
     }
 
-    @Override
     @Test
-    public void testShallowCopy() {
-        super.testShallowCopy(); // Test copy of parent.
-
-        final double initStep1 = 1e-1;
-        final double costTol1 = 1e-1;
-        final double parTol1 = 1e-1;
-        final double orthoTol1 = 1e-1;
-        final double threshold1 = 1e-1;
-        final LevenbergMarquardtOptimizer optim1 = createOptimizer()
-            .withInitialStepBoundFactor(initStep1)
-            .withCostRelativeTolerance(costTol1)
-            .withParameterRelativeTolerance(parTol1)
-            .withOrthoTolerance(orthoTol1)
-            .withRankingThreshold(threshold1);
-
-        final LevenbergMarquardtOptimizer optim2 = optim1.shallowCopy();
-
-        // Check that all fields have the same values.
-        Assert.assertTrue(optim1.getInitialStepBoundFactor() == optim2.getInitialStepBoundFactor());
-        Assert.assertTrue(optim1.getCostRelativeTolerance() == optim2.getCostRelativeTolerance());
-        Assert.assertTrue(optim1.getParameterRelativeTolerance() == optim2.getParameterRelativeTolerance());
-        Assert.assertTrue(optim1.getOrthoTolerance() == optim2.getOrthoTolerance());
-        Assert.assertTrue(optim1.getRankingThreshold() == optim2.getRankingThreshold());
-
-        // Change "optim2".
-        final double initStep2 = 2e-1;
-        final double costTol2 = 2e-1;
-        final double parTol2 = 2e-1;
-        final double orthoTol2 = 2e-1;
-        final double threshold2 = 2e-1;
-        optim2
-            .withInitialStepBoundFactor(initStep2)
-            .withCostRelativeTolerance(costTol2)
-            .withParameterRelativeTolerance(parTol2)
-            .withOrthoTolerance(orthoTol2)
-            .withRankingThreshold(threshold2);
-
-        // Check that all fields now have different values.
-        Assert.assertFalse(optim1.getInitialStepBoundFactor() == optim2.getInitialStepBoundFactor());
-        Assert.assertFalse(optim1.getCostRelativeTolerance() == optim2.getCostRelativeTolerance());
-        Assert.assertFalse(optim1.getParameterRelativeTolerance() == optim2.getParameterRelativeTolerance());
-        Assert.assertFalse(optim1.getOrthoTolerance() == optim2.getOrthoTolerance());
-        Assert.assertFalse(optim1.getRankingThreshold() == optim2.getRankingThreshold());
+    public void testLevenbergMarquardtOptimizer() throws Exception {
+        check(new LevenbergMarquardtOptimizer());
     }
 
     @Override
-    @Test(expected=SingularMatrixException.class)
-    public void testNonInvertible() {
-        /*
-         * Overrides the method from parent class, since the default singularity
-         * threshold (1e-14) does not trigger the expected exception.
-         */
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  1, 2, -3 },
-                {  2, 1,  3 },
-                { -3, 0, -9 }
-        }, new double[] { 1, 1, 1 });
-
-        final LevenbergMarquardtOptimizer optimizer = createOptimizer()
-            .withMaxEvaluations(100)
-            .withMaxIterations(20)
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(problem.getTarget())
-            .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1 }))
-            .withStartPoint(new double[] { 0, 0, 0 });
-
-        final double[] optimum = optimizer.optimize().getPoint();
-        Assert.assertTrue(FastMath.sqrt(optimizer.getTarget().length) * optimizer.computeRMS(optimum) > 0.6);
+    public void check(LeastSquaresOptimizer optimizer) throws Exception {
+        super.check(optimizer);
+        // Add LM-specific tests.
+        testBevington(optimizer);
+        testCircleFitting2(optimizer);
+    }
 
-        optimizer.computeCovariances(optimum, 1.5e-14);
+    @Override
+    public void testNonInvertible(LeastSquaresOptimizer optimizer) {
+        try {
+            /*
+             * Overrides the method from parent class, since the default singularity
+             * threshold (1e-14) does not trigger the expected exception.
+             */
+            LinearProblem problem = new LinearProblem(new double[][] {
+                    {  1, 2, -3 },
+                    {  2, 1,  3 },
+                    { -3, 0, -9 }
+            }, new double[] { 1, 1, 1 });
+
+            final Optimum optimum = optimizer.optimize(
+                    problem.getBuilder().maxIterations(20).build());
+
+            //TODO check that it is a bad fit? Why the extra conditions?
+            Assert.assertTrue(FastMath.sqrt(problem.getTarget().length) * optimum.computeRMS() > 0.6);
+
+            optimum.computeCovariances(1.5e-14);
+
+            fail(optimizer);
+        } catch (SingularMatrixException e) {
+            // expected
+        }
     }
 
     @Test
@@ -140,23 +112,20 @@ public class LevenbergMarquardtOptimizer
         circle.addPoint(110.0, -20.0);
         circle.addPoint( 35.0,  15.0);
         circle.addPoint( 45.0,  97.0);
-        checkEstimate(circle.getModelFunction(),
-                      circle.getModelFunctionJacobian(),
-                      0.1, 10, 1.0e-14, 1.0e-16, 1.0e-10, false);
-        checkEstimate(circle.getModelFunction(),
-                      circle.getModelFunctionJacobian(),
-                      0.1, 10, 1.0e-15, 1.0e-17, 1.0e-10, true);
-        checkEstimate(circle.getModelFunction(),
-                      circle.getModelFunctionJacobian(),
-                      0.1,  5, 1.0e-15, 1.0e-16, 1.0e-10, true);
+        checkEstimate(
+                circle, 0.1, 10, 1.0e-14, 1.0e-16, 1.0e-10, false);
+        checkEstimate(
+                circle, 0.1, 10, 1.0e-15, 1.0e-17, 1.0e-10, true);
+        checkEstimate(
+                circle, 0.1,  5, 1.0e-15, 1.0e-16, 1.0e-10, true);
         circle.addPoint(300, -300);
-        checkEstimate(circle.getModelFunction(),
-                      circle.getModelFunctionJacobian(),
-                      0.1, 20, 1.0e-18, 1.0e-16, 1.0e-10, true);
+        // wardev: changed shouldFail from true to false.
+        // TODO: why should this fail? It uses 15 evaluations.
+        checkEstimate(
+                circle, 0.1, 20, 1.0e-18, 1.0e-16, 1.0e-10, false);
     }
 
-    private void checkEstimate(MultivariateVectorFunction problem,
-                               MultivariateMatrixFunction problemJacobian,
+    private void checkEstimate(CircleVectorial circle,
                                double initialStepBoundFactor, int maxCostEval,
                                double costRelativeTolerance, double parRelativeTolerance,
                                double orthoTolerance, boolean shouldFail) {
@@ -166,17 +135,19 @@ public class LevenbergMarquardtOptimizer
                 .withCostRelativeTolerance(costRelativeTolerance)
                 .withParameterRelativeTolerance(parRelativeTolerance)
                 .withOrthoTolerance(orthoTolerance)
-                .withRankingThreshold(Precision.SAFE_MIN)
-                .withMaxEvaluations(maxCostEval)
-                .withMaxIterations(100)
-                .withModelAndJacobian(problem, problemJacobian)
-                .withTarget(new double[] { 0, 0, 0, 0, 0 })
-                .withWeight(new DiagonalMatrix(new double[] { 1, 1, 1, 1, 1 }))
-                .withStartPoint(new double[] { 98.680, 47.345 });
+                .withRankingThreshold(Precision.SAFE_MIN);
+
+            final LeastSquaresProblem problem = builder(circle)
+                    .maxEvaluations(maxCostEval)
+                    .maxIterations(100)
+                    .start(new double[] { 98.680, 47.345 })
+                    .build();
 
-            optimizer.optimize();
+            optimizer.optimize(problem);
 
             Assert.assertTrue(!shouldFail);
+            //TODO check it got the right answer
+
         } catch (DimensionMismatchException ee) {
             Assert.assertTrue(shouldFail);
         } catch (TooManyEvaluationsException ee) {
@@ -191,8 +162,7 @@ public class LevenbergMarquardtOptimizer
      * relaxed for this test to be currently really useful (the issue is under
      * investigation).
      */
-    @Test
-    public void testBevington() {
+    public void testBevington(LeastSquaresOptimizer optimizer) {
         final double[][] dataPoints = {
             // column 1 = times
             { 15, 30, 45, 60, 75, 90, 105, 120, 135, 150,
@@ -209,6 +179,7 @@ public class LevenbergMarquardtOptimizer
               14, 17, 24, 11, 22, 17, 12, 10, 13, 16,
               9, 9, 14, 21, 17, 13, 12, 18, 10, },
         };
+        final double[] start = {10, 900, 80, 27, 225};
 
         final BevingtonProblem problem = new BevingtonProblem();
 
@@ -221,20 +192,19 @@ public class LevenbergMarquardtOptimizer
             weights[i] = 1 / dataPoints[1][i];
         }
 
-        final LevenbergMarquardtOptimizer optimizer = LevenbergMarquardtOptimizer.create()
-            .withMaxEvaluations(100)
-            .withMaxIterations(20)
-            .withModelAndJacobian(problem.getModelFunction(),
-                                  problem.getModelFunctionJacobian())
-            .withTarget(dataPoints[1])
-            .withWeight(new DiagonalMatrix(weights))
-            .withStartPoint(new double[] { 10, 900, 80, 27, 225 });
+        final Optimum optimum = optimizer.optimize(
+                builder(problem)
+                        .target(dataPoints[1])
+                        .weight(new DiagonalMatrix(weights))
+                        .start(start)
+                        .maxIterations(20)
+                        .build()
+        );
 
-        final PointVectorValuePair optimum = optimizer.optimize();
         final double[] solution = optimum.getPoint();
         final double[] expectedSolution = { 10.4, 958.3, 131.4, 33.9, 205.0 };
 
-        final double[][] covarMatrix = optimizer.computeCovariances(solution, 1e-14);
+        final double[][] covarMatrix = optimum.computeCovariances(1e-14);
         final double[][] expectedCovarMatrix = {
             { 3.38, -3.69, 27.98, -2.34, -49.24 },
             { -3.69, 2492.26, 81.89, -69.21, -8.9 },
@@ -263,8 +233,7 @@ public class LevenbergMarquardtOptimizer
         }
     }
 
-    @Test
-    public void testCircleFitting2() {
+    public void testCircleFitting2(LeastSquaresOptimizer optimizer) {
         final double xCenter = 123.456;
         final double yCenter = 654.321;
         final double xSigma = 10;
@@ -286,20 +255,13 @@ public class LevenbergMarquardtOptimizer
         // First guess for the center's coordinates and radius.
         final double[] init = { 90, 659, 115 };
 
-        final LevenbergMarquardtOptimizer optimizer = LevenbergMarquardtOptimizer.create()
-            .withMaxEvaluations(100)
-            .withMaxIterations(50)
-            .withModelAndJacobian(circle.getModelFunction(),
-                                  circle.getModelFunctionJacobian())
-            .withTarget(circle.target())
-            .withWeight(new DiagonalMatrix(circle.weight()))
-            .withStartPoint(init);
+        final Optimum optimum = optimizer.optimize(
+                builder(circle).maxIterations(50).start(init).build());
 
-        final PointVectorValuePair optimum = optimizer.optimize();
         final double[] paramFound = optimum.getPoint();
 
         // Retrieve errors estimation.
-        final double[] asymptoticStandardErrorFound = optimizer.computeSigma(paramFound, 1e-14);
+        final double[] asymptoticStandardErrorFound = optimum.computeSigma(1e-14);
 
         // Check that the parameters are found within the assumed error bars.
         Assert.assertEquals(xCenter, paramFound[0], asymptoticStandardErrorFound[0]);
@@ -307,6 +269,7 @@ public class LevenbergMarquardtOptimizer
         Assert.assertEquals(radius, paramFound[2], asymptoticStandardErrorFound[2]);
     }
 
+    // TODO: delete this class or use it.
     private static class QuadraticProblem {
         private List<Double> x;
         private List<Double> y;

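The Levenberg-Marquardt path follows the same split: algorithm constants stay on the optimizer (the withXxx setters seen in checkEstimate above), while the problem data go through the builder. A condensed sketch, assuming 'circle' is a CircleProblem as in testCircleFitting2 and using that test's initial guess; imports are as in the test class.

    final LeastSquaresOptimizer optimizer = new LevenbergMarquardtOptimizer();

    final Optimum optimum = optimizer.optimize(
            new LeastSquaresBuilder()
                    .model(circle.getModelFunction())
                    .jacobian(circle.getModelFunctionJacobian())
                    .target(circle.target())
                    .weight(new DiagonalMatrix(circle.weight()))
                    .start(new double[] { 90, 659, 115 })
                    .maxEvaluations(100)
                    .maxIterations(50)
                    .build());

    final double[] paramFound = optimum.getPoint();               // fitted center x, center y, radius
    final double[] sigma = optimum.computeSigma(1e-14);           // asymptotic standard errors
    final double[][] covar = optimum.computeCovariances(1e-14);   // covariance matrix of the fit
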

