Posted to commits@commons.apache.org by tn...@apache.org on 2015/04/11 16:06:10 UTC

[5/5] [math] Remove deprecated classes in optim package.

Remove deprecated classes in optim package.


Project: http://git-wip-us.apache.org/repos/asf/commons-math/repo
Commit: http://git-wip-us.apache.org/repos/asf/commons-math/commit/e31fde87
Tree: http://git-wip-us.apache.org/repos/asf/commons-math/tree/e31fde87
Diff: http://git-wip-us.apache.org/repos/asf/commons-math/diff/e31fde87

Branch: refs/heads/master
Commit: e31fde875c6075ae3da9572c6f910cc29ceaf6c3
Parents: 0737cf8
Author: Thomas Neidhart <th...@gmail.com>
Authored: Sat Apr 11 16:05:10 2015 +0200
Committer: Thomas Neidhart <th...@gmail.com>
Committed: Sat Apr 11 16:05:10 2015 +0200

----------------------------------------------------------------------
 ...ltiStartMultivariateVectorOptimizerTest.java |  253 ---
 ...stractLeastSquaresOptimizerAbstractTest.java |  641 --------
 .../AbstractLeastSquaresOptimizerTest.java      |  129 --
 ...ractLeastSquaresOptimizerTestValidation.java |  335 ----
 .../vector/jacobian/CircleProblem.java          |  179 ---
 .../vector/jacobian/CircleVectorial.java        |   99 --
 .../jacobian/GaussNewtonOptimizerTest.java      |  173 ---
 .../LevenbergMarquardtOptimizerTest.java        |  375 -----
 .../nonlinear/vector/jacobian/MinpackTest.java  | 1467 ------------------
 .../jacobian/RandomCirclePointGenerator.java    |   91 --
 .../RandomStraightLinePointGenerator.java       |   99 --
 .../jacobian/StatisticalReferenceDataset.java   |  385 -----
 .../StatisticalReferenceDatasetFactory.java     |  203 ---
 .../vector/jacobian/StraightLineProblem.java    |  169 --
 14 files changed, 4598 deletions(-)
----------------------------------------------------------------------
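The classes removed here had been deprecated in favour of the least-squares fitting API. As a minimal migration sketch only, assuming the replacement lives in org.apache.commons.math4.fitting.leastsquares with the same shape as its math3 counterpart (LeastSquaresBuilder, LevenbergMarquardtOptimizer, LeastSquaresOptimizer.Optimum), the trivial "2x = 3" problem exercised by the removed tests could be written as:

    import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
    import org.apache.commons.math4.analysis.MultivariateVectorFunction;
    import org.apache.commons.math4.fitting.leastsquares.LeastSquaresBuilder;
    import org.apache.commons.math4.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
    import org.apache.commons.math4.fitting.leastsquares.LeastSquaresProblem;
    import org.apache.commons.math4.fitting.leastsquares.LevenbergMarquardtOptimizer;

    // Sketch under the assumption that math4 retains the math3 fitting.leastsquares API.
    public class TrivialLeastSquaresMigration {
        public static void main(String[] args) {
            // Model f(x) = 2x with target value 3; the least-squares optimum is x = 1.5.
            MultivariateVectorFunction value = new MultivariateVectorFunction() {
                public double[] value(double[] point) {
                    return new double[] { 2 * point[0] };
                }
            };
            MultivariateMatrixFunction jacobian = new MultivariateMatrixFunction() {
                public double[][] value(double[] point) {
                    return new double[][] { { 2 } };
                }
            };

            LeastSquaresProblem problem = new LeastSquaresBuilder()
                    .start(new double[] { 0 })   // replaces InitialGuess
                    .model(value, jacobian)      // replaces ModelFunction / ModelFunctionJacobian
                    .target(new double[] { 3 })  // replaces Target
                    .maxEvaluations(100)         // replaces MaxEval
                    .maxIterations(100)
                    .build();

            Optimum optimum = new LevenbergMarquardtOptimizer().optimize(problem);
            System.out.println("x   = " + optimum.getPoint().getEntry(0)); // ~1.5
            System.out.println("RMS = " + optimum.getRMS());
        }
    }

The builder-based problem definition replaces the OptimizationData varargs (Target, Weight, ModelFunction, ModelFunctionJacobian, InitialGuess) that the deleted optimizers accepted.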


http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/MultiStartMultivariateVectorOptimizerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/MultiStartMultivariateVectorOptimizerTest.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/MultiStartMultivariateVectorOptimizerTest.java
deleted file mode 100644
index 70b3f95..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/MultiStartMultivariateVectorOptimizerTest.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.math4.optim.nonlinear.vector;
-
-import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
-import org.apache.commons.math4.analysis.MultivariateVectorFunction;
-import org.apache.commons.math4.linear.BlockRealMatrix;
-import org.apache.commons.math4.linear.RealMatrix;
-import org.apache.commons.math4.optim.InitialGuess;
-import org.apache.commons.math4.optim.MaxEval;
-import org.apache.commons.math4.optim.OptimizationData;
-import org.apache.commons.math4.optim.PointVectorValuePair;
-import org.apache.commons.math4.optim.SimpleBounds;
-import org.apache.commons.math4.optim.SimpleVectorValueChecker;
-import org.apache.commons.math4.optim.nonlinear.vector.JacobianMultivariateVectorOptimizer;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunction;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunctionJacobian;
-import org.apache.commons.math4.optim.nonlinear.vector.MultiStartMultivariateVectorOptimizer;
-import org.apache.commons.math4.optim.nonlinear.vector.Target;
-import org.apache.commons.math4.optim.nonlinear.vector.Weight;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.GaussNewtonOptimizer;
-import org.apache.commons.math4.random.GaussianRandomGenerator;
-import org.apache.commons.math4.random.JDKRandomGenerator;
-import org.apache.commons.math4.random.RandomVectorGenerator;
-import org.apache.commons.math4.random.UncorrelatedRandomVectorGenerator;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * <p>Some of the unit tests are re-implementations of the MINPACK <a
- * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
- * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
- * The redistribution policy for MINPACK is available <a
- * href="http://www.netlib.org/minpack/disclaimer">here</a>, for
- * convenience, it is reproduced below.</p>
- *
- * <table border="0" width="80%" cellpadding="10" align="center" bgcolor="#E0E0E0">
- * <tr><td>
- *    Minpack Copyright Notice (1999) University of Chicago.
- *    All rights reserved
- * </td></tr>
- * <tr><td>
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * <ol>
- *  <li>Redistributions of source code must retain the above copyright
- *      notice, this list of conditions and the following disclaimer.</li>
- * <li>Redistributions in binary form must reproduce the above
- *     copyright notice, this list of conditions and the following
- *     disclaimer in the documentation and/or other materials provided
- *     with the distribution.</li>
- * <li>The end-user documentation included with the redistribution, if any,
- *     must include the following acknowledgment:
- *     <code>This product includes software developed by the University of
- *           Chicago, as Operator of Argonne National Laboratory.</code>
- *     Alternately, this acknowledgment may appear in the software itself,
- *     if and wherever such third-party acknowledgments normally appear.</li>
- * <li><strong>WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS"
- *     WITHOUT WARRANTY OF ANY KIND. THE COPYRIGHT HOLDER, THE
- *     UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND
- *     THEIR EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR
- *     IMPLIED, INCLUDING BUT NOT LIMITED TO ANY IMPLIED WARRANTIES
- *     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE
- *     OR NON-INFRINGEMENT, (2) DO NOT ASSUME ANY LEGAL LIABILITY
- *     OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR
- *     USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF
- *     THE SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4)
- *     DO NOT WARRANT THAT THE SOFTWARE WILL FUNCTION
- *     UNINTERRUPTED, THAT IT IS ERROR-FREE OR THAT ANY ERRORS WILL
- *     BE CORRECTED.</strong></li>
- * <li><strong>LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT
- *     HOLDER, THE UNITED STATES, THE UNITED STATES DEPARTMENT OF
- *     ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT,
- *     INCIDENTAL, CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF
- *     ANY KIND OR NATURE, INCLUDING BUT NOT LIMITED TO LOSS OF
- *     PROFITS OR LOSS OF DATA, FOR ANY REASON WHATSOEVER, WHETHER
- *     SUCH LIABILITY IS ASSERTED ON THE BASIS OF CONTRACT, TORT
- *     (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR OTHERWISE,
- *     EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
- *     POSSIBILITY OF SUCH LOSS OR DAMAGES.</strong></li>
- * </ol></td></tr>
- * </table>
- *
- * @author Argonne National Laboratory. MINPACK project. March 1980 (original fortran minpack tests)
- * @author Burton S. Garbow (original fortran minpack tests)
- * @author Kenneth E. Hillstrom (original fortran minpack tests)
- * @author Jorge J. More (original fortran minpack tests)
- * @author Luc Maisonobe (non-minpack tests and minpack tests Java translation)
- */
-@Deprecated
-public class MultiStartMultivariateVectorOptimizerTest {
-
-    @Test(expected=NullPointerException.class)
-    public void testGetOptimaBeforeOptimize() {
-
-        JacobianMultivariateVectorOptimizer underlyingOptimizer
-            = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
-        JDKRandomGenerator g = new JDKRandomGenerator();
-        g.setSeed(16069223052l);
-        RandomVectorGenerator generator
-            = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
-        MultiStartMultivariateVectorOptimizer optimizer
-            = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);
-
-        optimizer.getOptima();
-    }
-
-    @Test
-    public void testTrivial() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
-        JacobianMultivariateVectorOptimizer underlyingOptimizer
-            = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
-        JDKRandomGenerator g = new JDKRandomGenerator();
-        g.setSeed(16069223052l);
-        RandomVectorGenerator generator
-            = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
-        MultiStartMultivariateVectorOptimizer optimizer
-            = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);
-
-        PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(100),
-                                 problem.getModelFunction(),
-                                 problem.getModelFunctionJacobian(),
-                                 problem.getTarget(),
-                                 new Weight(new double[] { 1 }),
-                                 new InitialGuess(new double[] { 0 }));
-        Assert.assertEquals(1.5, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(3.0, optimum.getValue()[0], 1e-10);
-        PointVectorValuePair[] optima = optimizer.getOptima();
-        Assert.assertEquals(10, optima.length);
-        for (int i = 0; i < optima.length; i++) {
-            Assert.assertEquals(1.5, optima[i].getPoint()[0], 1e-10);
-            Assert.assertEquals(3.0, optima[i].getValue()[0], 1e-10);
-        }
-        Assert.assertTrue(optimizer.getEvaluations() > 20);
-        Assert.assertTrue(optimizer.getEvaluations() < 50);
-        Assert.assertEquals(100, optimizer.getMaxEvaluations());
-    }
-
-    @Test
-    public void testIssue914() {
-        LinearProblem problem = new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
-        JacobianMultivariateVectorOptimizer underlyingOptimizer =
-                new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6)) {
-            @Override
-            public PointVectorValuePair optimize(OptimizationData... optData) {
-                // filter out simple bounds, as they are not supported
-                // by the underlying optimizer, and we don't really care for this test
-                OptimizationData[] filtered = optData.clone();
-                for (int i = 0; i < filtered.length; ++i) {
-                    if (filtered[i] instanceof SimpleBounds) {
-                        filtered[i] = null;
-                    }
-                }
-                return super.optimize(filtered);
-            }
-        };
-        JDKRandomGenerator g = new JDKRandomGenerator();
-        g.setSeed(16069223052l);
-        RandomVectorGenerator generator =
-                new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
-        MultiStartMultivariateVectorOptimizer optimizer =
-                new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);
-
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           problem.getTarget(),
-                           new Weight(new double[] { 1 }),
-                           new InitialGuess(new double[] { 0 }),
-                           new SimpleBounds(new double[] { -1.0e-10 }, new double[] {  1.0e-10 }));
-        PointVectorValuePair[] optima = optimizer.getOptima();
-        // only the first start should have succeeded
-        Assert.assertEquals(1, optima.length);
-
-    }
-
-    /**
-     * Test demonstrating that the user exception is finally thrown if none
-     * of the runs succeed.
-     */
-    @Test(expected=TestException.class)
-    public void testNoOptimum() {
-        JacobianMultivariateVectorOptimizer underlyingOptimizer
-            = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
-        JDKRandomGenerator g = new JDKRandomGenerator();
-        g.setSeed(12373523445l);
-        RandomVectorGenerator generator
-            = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
-        MultiStartMultivariateVectorOptimizer optimizer
-            = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);
-        optimizer.optimize(new MaxEval(100),
-                           new Target(new double[] { 0 }),
-                           new Weight(new double[] { 1 }),
-                           new InitialGuess(new double[] { 0 }),
-                           new ModelFunction(new MultivariateVectorFunction() {
-                                   public double[] value(double[] point) {
-                                       throw new TestException();
-                                   }
-                               }));
-    }
-
-    private static class TestException extends RuntimeException {
-
-    private static final long serialVersionUID = 1L;}
-
-    private static class LinearProblem {
-        private final RealMatrix factors;
-        private final double[] target;
-
-        public LinearProblem(double[][] factors,
-                             double[] target) {
-            this.factors = new BlockRealMatrix(factors);
-            this.target  = target;
-        }
-
-        public Target getTarget() {
-            return new Target(target);
-        }
-
-        public ModelFunction getModelFunction() {
-            return new ModelFunction(new MultivariateVectorFunction() {
-                    public double[] value(double[] variables) {
-                        return factors.operate(variables);
-                    }
-                });
-        }
-
-        public ModelFunctionJacobian getModelFunctionJacobian() {
-            return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
-                    public double[][] value(double[] point) {
-                        return factors.getData();
-                    }
-                });
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerAbstractTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerAbstractTest.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerAbstractTest.java
deleted file mode 100644
index e2814fc..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerAbstractTest.java
+++ /dev/null
@@ -1,641 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
-import org.apache.commons.math4.analysis.MultivariateVectorFunction;
-import org.apache.commons.math4.exception.ConvergenceException;
-import org.apache.commons.math4.exception.DimensionMismatchException;
-import org.apache.commons.math4.geometry.euclidean.twod.Vector2D;
-import org.apache.commons.math4.linear.BlockRealMatrix;
-import org.apache.commons.math4.linear.RealMatrix;
-import org.apache.commons.math4.optim.InitialGuess;
-import org.apache.commons.math4.optim.MaxEval;
-import org.apache.commons.math4.optim.PointVectorValuePair;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunction;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunctionJacobian;
-import org.apache.commons.math4.optim.nonlinear.vector.Target;
-import org.apache.commons.math4.optim.nonlinear.vector.Weight;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.AbstractLeastSquaresOptimizer;
-import org.apache.commons.math4.util.FastMath;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * <p>Some of the unit tests are re-implementations of the MINPACK <a
- * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
- * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
- * The redistribution policy for MINPACK is available <a
- * href="http://www.netlib.org/minpack/disclaimer">here</a>, for
- * convenience, it is reproduced below.</p>
-
- * <table border="0" width="80%" cellpadding="10" align="center" bgcolor="#E0E0E0">
- * <tr><td>
- *    Minpack Copyright Notice (1999) University of Chicago.
- *    All rights reserved
- * </td></tr>
- * <tr><td>
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * <ol>
- *  <li>Redistributions of source code must retain the above copyright
- *      notice, this list of conditions and the following disclaimer.</li>
- * <li>Redistributions in binary form must reproduce the above
- *     copyright notice, this list of conditions and the following
- *     disclaimer in the documentation and/or other materials provided
- *     with the distribution.</li>
- * <li>The end-user documentation included with the redistribution, if any,
- *     must include the following acknowledgment:
- *     <code>This product includes software developed by the University of
- *           Chicago, as Operator of Argonne National Laboratory.</code>
- *     Alternately, this acknowledgment may appear in the software itself,
- *     if and wherever such third-party acknowledgments normally appear.</li>
- * <li><strong>WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS"
- *     WITHOUT WARRANTY OF ANY KIND. THE COPYRIGHT HOLDER, THE
- *     UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND
- *     THEIR EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR
- *     IMPLIED, INCLUDING BUT NOT LIMITED TO ANY IMPLIED WARRANTIES
- *     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE
- *     OR NON-INFRINGEMENT, (2) DO NOT ASSUME ANY LEGAL LIABILITY
- *     OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR
- *     USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF
- *     THE SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4)
- *     DO NOT WARRANT THAT THE SOFTWARE WILL FUNCTION
- *     UNINTERRUPTED, THAT IT IS ERROR-FREE OR THAT ANY ERRORS WILL
- *     BE CORRECTED.</strong></li>
- * <li><strong>LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT
- *     HOLDER, THE UNITED STATES, THE UNITED STATES DEPARTMENT OF
- *     ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT,
- *     INCIDENTAL, CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF
- *     ANY KIND OR NATURE, INCLUDING BUT NOT LIMITED TO LOSS OF
- *     PROFITS OR LOSS OF DATA, FOR ANY REASON WHATSOEVER, WHETHER
- *     SUCH LIABILITY IS ASSERTED ON THE BASIS OF CONTRACT, TORT
- *     (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR OTHERWISE,
- *     EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
- *     POSSIBILITY OF SUCH LOSS OR DAMAGES.</strong></li>
- * </ol></td></tr>
- * </table>
-
- * @author Argonne National Laboratory. MINPACK project. March 1980 (original fortran minpack tests)
- * @author Burton S. Garbow (original fortran minpack tests)
- * @author Kenneth E. Hillstrom (original fortran minpack tests)
- * @author Jorge J. More (original fortran minpack tests)
- * @author Luc Maisonobe (non-minpack tests and minpack tests Java translation)
- */
-@Deprecated
-public abstract class AbstractLeastSquaresOptimizerAbstractTest {
-
-    public abstract AbstractLeastSquaresOptimizer createOptimizer();
-
-    @Test
-    public void testGetIterations() {
-        AbstractLeastSquaresOptimizer optim = createOptimizer();
-        optim.optimize(new MaxEval(100), new Target(new double[] { 1 }),
-                       new Weight(new double[] { 1 }),
-                       new InitialGuess(new double[] { 3 }),
-                       new ModelFunction(new MultivariateVectorFunction() {
-                               public double[] value(double[] point) {
-                                   return new double[] {
-                                       FastMath.pow(point[0], 4)
-                                   };
-                               }
-                           }),
-                       new ModelFunctionJacobian(new MultivariateMatrixFunction() {
-                               public double[][] value(double[] point) {
-                                   return new double[][] {
-                                       { 0.25 * FastMath.pow(point[0], 3) }
-                                   };
-                               }
-                           }));
-
-        Assert.assertTrue(optim.getIterations() > 0);
-    }
-
-    @Test
-    public void testTrivial() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1 }),
-                               new InitialGuess(new double[] { 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(1.5, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(3.0, optimum.getValue()[0], 1e-10);
-    }
-
-    @Test
-    public void testQRColumnsPermutation() {
-
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, -1 }, { 0, 2 }, { 1, -2 } },
-                                new double[] { 4, 6, 1 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(7, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(3, optimum.getPoint()[1], 1e-10);
-        Assert.assertEquals(4, optimum.getValue()[0], 1e-10);
-        Assert.assertEquals(6, optimum.getValue()[1], 1e-10);
-        Assert.assertEquals(1, optimum.getValue()[2], 1e-10);
-    }
-
-    @Test
-    public void testNoDependency() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 2, 0, 0, 0, 0, 0 },
-                { 0, 2, 0, 0, 0, 0 },
-                { 0, 0, 2, 0, 0, 0 },
-                { 0, 0, 0, 2, 0, 0 },
-                { 0, 0, 0, 0, 2, 0 },
-                { 0, 0, 0, 0, 0, 2 }
-        }, new double[] { 0, 1.1, 2.2, 3.3, 4.4, 5.5 });
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1, 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 0, 0, 0, 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        for (int i = 0; i < problem.target.length; ++i) {
-            Assert.assertEquals(0.55 * i, optimum.getPoint()[i], 1e-10);
-        }
-    }
-
-    @Test
-    public void testOneSet() {
-
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  1,  0, 0 },
-                { -1,  1, 0 },
-                {  0, -1, 1 }
-        }, new double[] { 1, 1, 1});
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(1, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(2, optimum.getPoint()[1], 1e-10);
-        Assert.assertEquals(3, optimum.getPoint()[2], 1e-10);
-    }
-
-    @Test
-    public void testTwoSets() {
-        double epsilon = 1e-7;
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  2,  1,   0,  4,       0, 0 },
-                { -4, -2,   3, -7,       0, 0 },
-                {  4,  1,  -2,  8,       0, 0 },
-                {  0, -3, -12, -1,       0, 0 },
-                {  0,  0,   0,  0, epsilon, 1 },
-                {  0,  0,   0,  0,       1, 1 }
-        }, new double[] { 2, -9, 2, 2, 1 + epsilon * epsilon, 2});
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1, 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 0, 0, 0, 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(3, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(4, optimum.getPoint()[1], 1e-10);
-        Assert.assertEquals(-1, optimum.getPoint()[2], 1e-10);
-        Assert.assertEquals(-2, optimum.getPoint()[3], 1e-10);
-        Assert.assertEquals(1 + epsilon, optimum.getPoint()[4], 1e-10);
-        Assert.assertEquals(1 - epsilon, optimum.getPoint()[5], 1e-10);
-    }
-
-    @Test(expected=ConvergenceException.class)
-    public void testNonInvertible() throws Exception {
-
-        LinearProblem problem = new LinearProblem(new double[][] {
-                {  1, 2, -3 },
-                {  2, 1,  3 },
-                { -3, 0, -9 }
-        }, new double[] { 1, 1, 1 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           problem.getTarget(),
-                           new Weight(new double[] { 1, 1, 1 }),
-                           new InitialGuess(new double[] { 0, 0, 0 }));
-    }
-
-    @Test
-    public void testIllConditioned() {
-        LinearProblem problem1 = new LinearProblem(new double[][] {
-                { 10, 7,  8,  7 },
-                {  7, 5,  6,  5 },
-                {  8, 6, 10,  9 },
-                {  7, 5,  9, 10 }
-        }, new double[] { 32, 23, 33, 31 });
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum1 =
-            optimizer.optimize(new MaxEval(100),
-                               problem1.getModelFunction(),
-                               problem1.getModelFunctionJacobian(),
-                               problem1.getTarget(),
-                               new Weight(new double[] { 1, 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 1, 2, 3 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(1, optimum1.getPoint()[0], 1e-10);
-        Assert.assertEquals(1, optimum1.getPoint()[1], 1e-10);
-        Assert.assertEquals(1, optimum1.getPoint()[2], 1e-10);
-        Assert.assertEquals(1, optimum1.getPoint()[3], 1e-10);
-
-        LinearProblem problem2 = new LinearProblem(new double[][] {
-                { 10.00, 7.00, 8.10, 7.20 },
-                {  7.08, 5.04, 6.00, 5.00 },
-                {  8.00, 5.98, 9.89, 9.00 },
-                {  6.99, 4.99, 9.00, 9.98 }
-        }, new double[] { 32, 23, 33, 31 });
-        PointVectorValuePair optimum2 =
-            optimizer.optimize(new MaxEval(100),
-                               problem2.getModelFunction(),
-                               problem2.getModelFunctionJacobian(),
-                               problem2.getTarget(), 
-                               new Weight(new double[] { 1, 1, 1, 1 }),
-                               new InitialGuess(new double[] { 0, 1, 2, 3 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(-81, optimum2.getPoint()[0], 1e-8);
-        Assert.assertEquals(137, optimum2.getPoint()[1], 1e-8);
-        Assert.assertEquals(-34, optimum2.getPoint()[2], 1e-8);
-        Assert.assertEquals( 22, optimum2.getPoint()[3], 1e-8);
-    }
-
-    @Test
-    public void testMoreEstimatedParametersSimple() {
-
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 3, 2,  0, 0 },
-                { 0, 1, -1, 1 },
-                { 2, 0,  1, 0 }
-        }, new double[] { 7, 3, 5 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           problem.getTarget(),
-                           new Weight(new double[] { 1, 1, 1 }),
-                           new InitialGuess(new double[] { 7, 6, 5, 4 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-    }
-
-    @Test
-    public void testMoreEstimatedParametersUnsorted() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1, 1,  0,  0, 0,  0 },
-                { 0, 0,  1,  1, 1,  0 },
-                { 0, 0,  0,  0, 1, -1 },
-                { 0, 0, -1,  1, 0,  1 },
-                { 0, 0,  0, -1, 1,  0 }
-       }, new double[] { 3, 12, -1, 7, 1 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1, 1, 1 }),
-                               new InitialGuess(new double[] { 2, 2, 2, 2, 2, 2 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(3, optimum.getPointRef()[2], 1e-10);
-        Assert.assertEquals(4, optimum.getPointRef()[3], 1e-10);
-        Assert.assertEquals(5, optimum.getPointRef()[4], 1e-10);
-        Assert.assertEquals(6, optimum.getPointRef()[5], 1e-10);
-    }
-
-    @Test
-    public void testRedundantEquations() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1,  1 },
-                { 1, -1 },
-                { 1,  3 }
-        }, new double[] { 3, 1, 5 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1, 1 }),
-                               new InitialGuess(new double[] { 1, 1 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(2, optimum.getPointRef()[0], 1e-10);
-        Assert.assertEquals(1, optimum.getPointRef()[1], 1e-10);
-    }
-
-    @Test
-    public void testInconsistentEquations() {
-        LinearProblem problem = new LinearProblem(new double[][] {
-                { 1,  1 },
-                { 1, -1 },
-                { 1,  3 }
-        }, new double[] { 3, 1, 4 });
-
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           problem.getTarget(),
-                           new Weight(new double[] { 1, 1, 1 }),
-                           new InitialGuess(new double[] { 1, 1 }));
-        Assert.assertTrue(optimizer.getRMS() > 0.1);
-    }
-
-    @Test(expected=DimensionMismatchException.class)
-    public void testInconsistentSizes1() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, 0 }, { 0, 1 } },
-                                new double[] { -1, 1 });
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               problem.getModelFunction(),
-                               problem.getModelFunctionJacobian(),
-                               problem.getTarget(),
-                               new Weight(new double[] { 1, 1 }),
-                               new InitialGuess(new double[] { 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(-1, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
-
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           problem.getTarget(),
-                           new Weight(new double[] { 1 }),
-                           new InitialGuess(new double[] { 0, 0 }));
-    }
-
-    @Test(expected=DimensionMismatchException.class)
-    public void testInconsistentSizes2() {
-        LinearProblem problem
-            = new LinearProblem(new double[][] { { 1, 0 }, { 0, 1 } },
-                                new double[] { -1, 1 });
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(100),
-                                 problem.getModelFunction(),
-                                 problem.getModelFunctionJacobian(),
-                                 problem.getTarget(),
-                                 new Weight(new double[] { 1, 1 }),
-                                 new InitialGuess(new double[] { 0, 0 }));
-        Assert.assertEquals(0, optimizer.getRMS(), 1e-10);
-        Assert.assertEquals(-1, optimum.getPoint()[0], 1e-10);
-        Assert.assertEquals(1, optimum.getPoint()[1], 1e-10);
-
-        optimizer.optimize(new MaxEval(100),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           new Target(new double[] { 1 }),
-                           new Weight(new double[] { 1 }),
-                           new InitialGuess(new double[] { 0, 0 }));
-    }
-
-    @Test
-    public void testCircleFitting() {
-        CircleVectorial circle = new CircleVectorial();
-        circle.addPoint( 30,  68);
-        circle.addPoint( 50,  -6);
-        circle.addPoint(110, -20);
-        circle.addPoint( 35,  15);
-        circle.addPoint( 45,  97);
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(100),
-                                 circle.getModelFunction(),
-                                 circle.getModelFunctionJacobian(),
-                                 new Target(new double[] { 0, 0, 0, 0, 0 }),
-                                 new Weight(new double[] { 1, 1, 1, 1, 1 }),
-                                 new InitialGuess(new double[] { 98.680, 47.345 }));
-        Assert.assertTrue(optimizer.getEvaluations() < 10);
-        double rms = optimizer.getRMS();
-        Assert.assertEquals(1.768262623567235,  FastMath.sqrt(circle.getN()) * rms,  1e-10);
-        Vector2D center = new Vector2D(optimum.getPointRef()[0], optimum.getPointRef()[1]);
-        Assert.assertEquals(69.96016176931406, circle.getRadius(center), 1e-6);
-        Assert.assertEquals(96.07590211815305, center.getX(),            1e-6);
-        Assert.assertEquals(48.13516790438953, center.getY(),            1e-6);
-        double[][] cov = optimizer.computeCovariances(optimum.getPoint(), 1e-14);
-        Assert.assertEquals(1.839, cov[0][0], 0.001);
-        Assert.assertEquals(0.731, cov[0][1], 0.001);
-        Assert.assertEquals(cov[0][1], cov[1][0], 1e-14);
-        Assert.assertEquals(0.786, cov[1][1], 0.001);
-
-        // add perfect measurements and check errors are reduced
-        double  r = circle.getRadius(center);
-        for (double d= 0; d < 2 * FastMath.PI; d += 0.01) {
-            circle.addPoint(center.getX() + r * FastMath.cos(d), center.getY() + r * FastMath.sin(d));
-        }
-        double[] target = new double[circle.getN()];
-        Arrays.fill(target, 0);
-        double[] weights = new double[circle.getN()];
-        Arrays.fill(weights, 2);
-        optimum = optimizer.optimize(new MaxEval(100),
-                                     circle.getModelFunction(),
-                                     circle.getModelFunctionJacobian(),
-                                     new Target(target),
-                                     new Weight(weights),
-                                     new InitialGuess(new double[] { 98.680, 47.345 }));
-        cov = optimizer.computeCovariances(optimum.getPoint(), 1e-14);
-        Assert.assertEquals(0.0016, cov[0][0], 0.001);
-        Assert.assertEquals(3.2e-7, cov[0][1], 1e-9);
-        Assert.assertEquals(cov[0][1], cov[1][0], 1e-14);
-        Assert.assertEquals(0.0016, cov[1][1], 0.001);
-    }
-
-    @Test
-    public void testCircleFittingBadInit() {
-        CircleVectorial circle = new CircleVectorial();
-        double[][] points = circlePoints;
-        double[] target = new double[points.length];
-        Arrays.fill(target, 0);
-        double[] weights = new double[points.length];
-        Arrays.fill(weights, 2);
-        for (int i = 0; i < points.length; ++i) {
-            circle.addPoint(points[i][0], points[i][1]);
-        }
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(100),
-                                 circle.getModelFunction(),
-                                 circle.getModelFunctionJacobian(),
-                                 new Target(target),
-                                 new Weight(weights),
-                                 new InitialGuess(new double[] { -12, -12 }));
-        Vector2D center = new Vector2D(optimum.getPointRef()[0], optimum.getPointRef()[1]);
-        Assert.assertTrue(optimizer.getEvaluations() < 25);
-        Assert.assertEquals( 0.043, optimizer.getRMS(), 1e-3);
-        Assert.assertEquals( 0.292235,  circle.getRadius(center), 1e-6);
-        Assert.assertEquals(-0.151738,  center.getX(),            1e-6);
-        Assert.assertEquals( 0.2075001, center.getY(),            1e-6);
-    }
-
-    @Test
-    public void testCircleFittingGoodInit() {
-        CircleVectorial circle = new CircleVectorial();
-        double[][] points = circlePoints;
-        double[] target = new double[points.length];
-        Arrays.fill(target, 0);
-        double[] weights = new double[points.length];
-        Arrays.fill(weights, 2);
-        for (int i = 0; i < points.length; ++i) {
-            circle.addPoint(points[i][0], points[i][1]);
-        }
-        AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        PointVectorValuePair optimum =
-            optimizer.optimize(new MaxEval(100),
-                               circle.getModelFunction(),
-                               circle.getModelFunctionJacobian(),
-                               new Target(target),
-                               new Weight(weights),
-                               new InitialGuess(new double[] { 0, 0 }));
-        Assert.assertEquals(-0.1517383071957963, optimum.getPointRef()[0], 1e-6);
-        Assert.assertEquals(0.2074999736353867,  optimum.getPointRef()[1], 1e-6);
-        Assert.assertEquals(0.04268731682389561, optimizer.getRMS(),       1e-8);
-    }
-
-    private final double[][] circlePoints = new double[][] {
-        {-0.312967,  0.072366}, {-0.339248,  0.132965}, {-0.379780,  0.202724},
-        {-0.390426,  0.260487}, {-0.361212,  0.328325}, {-0.346039,  0.392619},
-        {-0.280579,  0.444306}, {-0.216035,  0.470009}, {-0.149127,  0.493832},
-        {-0.075133,  0.483271}, {-0.007759,  0.452680}, { 0.060071,  0.410235},
-        { 0.103037,  0.341076}, { 0.118438,  0.273884}, { 0.131293,  0.192201},
-        { 0.115869,  0.129797}, { 0.072223,  0.058396}, { 0.022884,  0.000718},
-        {-0.053355, -0.020405}, {-0.123584, -0.032451}, {-0.216248, -0.032862},
-        {-0.278592, -0.005008}, {-0.337655,  0.056658}, {-0.385899,  0.112526},
-        {-0.405517,  0.186957}, {-0.415374,  0.262071}, {-0.387482,  0.343398},
-        {-0.347322,  0.397943}, {-0.287623,  0.458425}, {-0.223502,  0.475513},
-        {-0.135352,  0.478186}, {-0.061221,  0.483371}, { 0.003711,  0.422737},
-        { 0.065054,  0.375830}, { 0.108108,  0.297099}, { 0.123882,  0.222850},
-        { 0.117729,  0.134382}, { 0.085195,  0.056820}, { 0.029800, -0.019138},
-        {-0.027520, -0.072374}, {-0.102268, -0.091555}, {-0.200299, -0.106578},
-        {-0.292731, -0.091473}, {-0.356288, -0.051108}, {-0.420561,  0.014926},
-        {-0.471036,  0.074716}, {-0.488638,  0.182508}, {-0.485990,  0.254068},
-        {-0.463943,  0.338438}, {-0.406453,  0.404704}, {-0.334287,  0.466119},
-        {-0.254244,  0.503188}, {-0.161548,  0.495769}, {-0.075733,  0.495560},
-        { 0.001375,  0.434937}, { 0.082787,  0.385806}, { 0.115490,  0.323807},
-        { 0.141089,  0.223450}, { 0.138693,  0.131703}, { 0.126415,  0.049174},
-        { 0.066518, -0.010217}, {-0.005184, -0.070647}, {-0.080985, -0.103635},
-        {-0.177377, -0.116887}, {-0.260628, -0.100258}, {-0.335756, -0.056251},
-        {-0.405195, -0.000895}, {-0.444937,  0.085456}, {-0.484357,  0.175597},
-        {-0.472453,  0.248681}, {-0.438580,  0.347463}, {-0.402304,  0.422428},
-        {-0.326777,  0.479438}, {-0.247797,  0.505581}, {-0.152676,  0.519380},
-        {-0.071754,  0.516264}, { 0.015942,  0.472802}, { 0.076608,  0.419077},
-        { 0.127673,  0.330264}, { 0.159951,  0.262150}, { 0.153530,  0.172681},
-        { 0.140653,  0.089229}, { 0.078666,  0.024981}, { 0.023807, -0.037022},
-        {-0.048837, -0.077056}, {-0.127729, -0.075338}, {-0.221271, -0.067526}
-    };
-
-    public void doTestStRD(final StatisticalReferenceDataset dataset,
-                           final double errParams,
-                           final double errParamsSd) {
-        final AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        final double[] w = new double[dataset.getNumObservations()];
-        Arrays.fill(w, 1);
-
-        final double[][] data = dataset.getData();
-        final double[] initial = dataset.getStartingPoint(0);
-        final StatisticalReferenceDataset.LeastSquaresProblem problem = dataset.getLeastSquaresProblem();
-        final PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(100),
-                                 problem.getModelFunction(),
-                                 problem.getModelFunctionJacobian(),
-                                 new Target(data[1]),
-                                 new Weight(w),
-                                 new InitialGuess(initial));
-
-        final double[] actual = optimum.getPoint();
-        for (int i = 0; i < actual.length; i++) {
-            double expected = dataset.getParameter(i);
-            double delta = FastMath.abs(errParams * expected);
-            Assert.assertEquals(dataset.getName() + ", param #" + i,
-                                expected, actual[i], delta);
-        }
-    }
-
-    @Test
-    public void testKirby2() throws IOException {
-        doTestStRD(StatisticalReferenceDatasetFactory.createKirby2(), 1E-7, 1E-7);
-    }
-
-    @Test
-    public void testHahn1() throws IOException {
-        doTestStRD(StatisticalReferenceDatasetFactory.createHahn1(), 1E-7, 1E-4);
-    }
-
-    static class LinearProblem {
-        private final RealMatrix factors;
-        private final double[] target;
-
-        public LinearProblem(double[][] factors, double[] target) {
-            this.factors = new BlockRealMatrix(factors);
-            this.target  = target;
-        }
-
-        public Target getTarget() {
-            return new Target(target);
-        }
-
-        public ModelFunction getModelFunction() {
-            return new ModelFunction(new MultivariateVectorFunction() {
-                    public double[] value(double[] params) {
-                        return factors.operate(params);
-                    }
-                });
-        }
-
-        public ModelFunctionJacobian getModelFunctionJacobian() {
-            return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
-                    public double[][] value(double[] params) {
-                        return factors.getData();
-                    }
-                });
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTest.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTest.java
deleted file mode 100644
index aad5e43..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
- * or agreed to in writing, software distributed under the License is
- * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the specific language
- * governing permissions and limitations under the License.
- */
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.commons.math4.optim.InitialGuess;
-import org.apache.commons.math4.optim.MaxEval;
-import org.apache.commons.math4.optim.PointVectorValuePair;
-import org.apache.commons.math4.optim.nonlinear.vector.Target;
-import org.apache.commons.math4.optim.nonlinear.vector.Weight;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.AbstractLeastSquaresOptimizer;
-import org.apache.commons.math4.util.FastMath;
-import org.junit.Test;
-import org.junit.Assert;
-
-@Deprecated
-public class AbstractLeastSquaresOptimizerTest {
-
-    public static AbstractLeastSquaresOptimizer createOptimizer() {
-        return new AbstractLeastSquaresOptimizer(null) {
-
-            @Override
-            protected PointVectorValuePair doOptimize() {
-                final double[] params = getStartPoint();
-                final double[] res = computeResiduals(computeObjectiveValue(params));
-                setCost(computeCost(res));
-                return new PointVectorValuePair(params, null);
-            }
-        };
-    }
-
-    @Test
-    public void testGetChiSquare() throws IOException {
-        final StatisticalReferenceDataset dataset
-            = StatisticalReferenceDatasetFactory.createKirby2();
-        final AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1.0);
-
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
-
-        optimizer.optimize(new MaxEval(1),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           new Target(y),
-                           new Weight(w),
-                           new InitialGuess(a));
-        final double expected = dataset.getResidualSumOfSquares();
-        final double actual = optimizer.getChiSquare();
-        Assert.assertEquals(dataset.getName(), expected, actual,
-                            1E-11 * expected);
-    }
-
-    @Test
-    public void testGetRMS() throws IOException {
-        final StatisticalReferenceDataset dataset
-            = StatisticalReferenceDatasetFactory.createKirby2();
-        final AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1);
-
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
-
-        optimizer.optimize(new MaxEval(1),
-                           problem.getModelFunction(),
-                           problem.getModelFunctionJacobian(),
-                           new Target(y),
-                           new Weight(w),
-                           new InitialGuess(a));
-
-        final double expected = FastMath
-            .sqrt(dataset.getResidualSumOfSquares() /
-                  dataset.getNumObservations());
-        final double actual = optimizer.getRMS();
-        Assert.assertEquals(dataset.getName(), expected, actual,
-                            1E-11 * expected);
-    }
-
-    @Test
-    public void testComputeSigma() throws IOException {
-        final StatisticalReferenceDataset dataset
-            = StatisticalReferenceDatasetFactory.createKirby2();
-        final AbstractLeastSquaresOptimizer optimizer = createOptimizer();
-        final double[] a = dataset.getParameters();
-        final double[] y = dataset.getData()[1];
-        final double[] w = new double[y.length];
-        Arrays.fill(w, 1);
-
-        StatisticalReferenceDataset.LeastSquaresProblem problem
-            = dataset.getLeastSquaresProblem();
-
-        final PointVectorValuePair optimum
-            = optimizer.optimize(new MaxEval(1),
-                                 problem.getModelFunction(),
-                                 problem.getModelFunctionJacobian(),
-                                 new Target(y),
-                                 new Weight(w),
-                                 new InitialGuess(a));
-
-        final double[] sig = optimizer.computeSigma(optimum.getPoint(), 1e-14);
-
-        final int dof = y.length - a.length;
-        final double[] expected = dataset.getParametersStandardDeviations();
-        for (int i = 0; i < sig.length; i++) {
-            final double actual = FastMath.sqrt(optimizer.getChiSquare() / dof) * sig[i];
-            Assert.assertEquals(dataset.getName() + ", parameter #" + i,
-                                expected[i], actual, 1e-6 * expected[i]);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTestValidation.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTestValidation.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTestValidation.java
deleted file mode 100644
index 9235e6b..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/AbstractLeastSquaresOptimizerTestValidation.java
+++ /dev/null
@@ -1,335 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
- * or agreed to in writing, software distributed under the License is
- * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the specific language
- * governing permissions and limitations under the License.
- */
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.ArrayList;
-import java.awt.geom.Point2D;
-
-import org.apache.commons.math4.optim.InitialGuess;
-import org.apache.commons.math4.optim.MaxEval;
-import org.apache.commons.math4.optim.PointVectorValuePair;
-import org.apache.commons.math4.optim.nonlinear.vector.Target;
-import org.apache.commons.math4.optim.nonlinear.vector.Weight;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.AbstractLeastSquaresOptimizer;
-import org.apache.commons.math4.stat.descriptive.StatisticalSummary;
-import org.apache.commons.math4.stat.descriptive.SummaryStatistics;
-import org.apache.commons.math4.util.FastMath;
-import org.junit.Test;
-import org.junit.Assert;
-
-/**
- * This class demonstrates the main functionality of the
- * {@link AbstractLeastSquaresOptimizer}, common to the
- * optimizer implementations in package
- * {@link org.apache.commons.math4.optimization.general}.
- * <br/>
- * Not enabled by default, as the class name does not end with "Test".
- * <br/>
- * Invoke by running
- * <pre><code>
- *  mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation
- * </code></pre>
- * or by running
- * <pre><code>
- *  mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation -DargLine="-DmcRuns=1234 -server"
- * </code></pre>
- */
-@Deprecated
-public class AbstractLeastSquaresOptimizerTestValidation {
-    private static final int MONTE_CARLO_RUNS = Integer.parseInt(System.getProperty("mcRuns",
-                                                                                    "100"));
-
-    /**
-     * Using a Monte-Carlo procedure, this test checks the error estimates
-     * provided by the square roots of the diagonal elements of the
-     * covariance matrix.
-     * <br/>
-     * The test generates sets of observations, each sampled from
-     * a Gaussian distribution.
-     * <br/>
-     * The optimization problem solved is defined in class
-     * {@link StraightLineProblem}.
-     * <br/>
-     * The output (on stdout) will be a table summarizing the distribution
-     * of parameters generated by the Monte-Carlo process and by the direct
-     * estimation provided by the diagonal elements of the covariance matrix.
-     */
-    @Test
-    public void testParametersErrorMonteCarloObservations() {
-        // Error on the observations.
-        final double yError = 15;
-
-        // True values of the parameters.
-        final double slope = 123.456;
-        final double offset = -98.765;
-
-        // Samples generator.
-        final RandomStraightLinePointGenerator lineGenerator
-            = new RandomStraightLinePointGenerator(slope, offset,
-                                                   yError,
-                                                   -1e3, 1e4,
-                                                   138577L);
-
-        // Number of observations.
-        final int numObs = 100; // XXX Should be a command-line option.
-        // Number of parameters.
-        final int numParams = 2;
-
-        // Parameters found for each Monte-Carlo run.
-        final SummaryStatistics[] paramsFoundByDirectSolution = new SummaryStatistics[numParams];
-        // Sigma estimations (square-root of the diagonal elements of the
-        // covariance matrix), for each Monte-Carlo run.
-        final SummaryStatistics[] sigmaEstimate = new SummaryStatistics[numParams];
-
-        // Initialize statistics accumulators.
-        for (int i = 0; i < numParams; i++) {
-            paramsFoundByDirectSolution[i] = new SummaryStatistics();
-            sigmaEstimate[i] = new SummaryStatistics();
-        }
-
-        // Dummy optimizer (to compute the covariance matrix).
-        final AbstractLeastSquaresOptimizer optim = new DummyOptimizer();
-        final double[] init = { slope, offset };
-
-        // Monte-Carlo (generates many sets of observations).
-        final int mcRepeat = MONTE_CARLO_RUNS;
-        int mcCount = 0;
-        while (mcCount < mcRepeat) {
-            // Observations.
-            final Point2D.Double[] obs = lineGenerator.generate(numObs);
-
-            final StraightLineProblem problem = new StraightLineProblem(yError);
-            for (int i = 0; i < numObs; i++) {
-                final Point2D.Double p = obs[i];
-                problem.addPoint(p.x, p.y);
-            }
-
-            // Direct solution (using simple regression).
-            final double[] regress = problem.solve();
-
-            // Estimation of the standard deviation (diagonal elements of the
-            // covariance matrix).
-            final PointVectorValuePair optimum
-                = optim.optimize(new MaxEval(Integer.MAX_VALUE),
-                                 problem.getModelFunction(),
-                                 problem.getModelFunctionJacobian(),
-                                 new Target(problem.target()),
-                                 new Weight(problem.weight()),
-                                 new InitialGuess(init));
-            final double[] sigma = optim.computeSigma(optimum.getPoint(), 1e-14);
-
-            // Accumulate statistics.
-            for (int i = 0; i < numParams; i++) {
-                paramsFoundByDirectSolution[i].addValue(regress[i]);
-                sigmaEstimate[i].addValue(sigma[i]);
-            }
-
-            // Next Monte-Carlo.
-            ++mcCount;
-        }
-
-        // Print statistics.
-        final String line = "--------------------------------------------------------------";
-        System.out.println("                 True value       Mean        Std deviation");
-        for (int i = 0; i < numParams; i++) {
-            System.out.println(line);
-            System.out.println("Parameter #" + i);
-
-            StatisticalSummary s = paramsFoundByDirectSolution[i].getSummary();
-            System.out.printf("              %+.6e   %+.6e   %+.6e\n",
-                              init[i],
-                              s.getMean(),
-                              s.getStandardDeviation());
-
-            s = sigmaEstimate[i].getSummary();
-            System.out.printf("sigma: %+.6e (%+.6e)\n",
-                              s.getMean(),
-                              s.getStandardDeviation());
-        }
-        System.out.println(line);
-
-        // Check the error estimation.
-        for (int i = 0; i < numParams; i++) {
-            Assert.assertEquals(paramsFoundByDirectSolution[i].getSummary().getStandardDeviation(),
-                                sigmaEstimate[i].getSummary().getMean(),
-                                8e-2);
-        }
-    }
-
-    /**
-     * In this test, the set of observations is fixed.
-     * Using a Monte-Carlo procedure, it generates sets of parameters,
-     * and determines the parameter change that will result in the
-     * normalized chi-square becoming larger by one than the value from
-     * the best-fit solution.
-     * <br/>
-     * The optimization problem solved is defined in class
-     * {@link StraightLineProblem}.
-     * <br/>
-     * The output (on stdout) will be a list of lines containing:
-     * <ul>
-     *  <li>slope of the straight line,</li>
-     *  <li>intercept of the straight line,</li>
-     *  <li>chi-square of the solution defined by the above two values.</li>
-     * </ul>
-     * The output is separated into two blocks (with a blank line between
-     * them); the first block will contain all parameter sets for which
-     * {@code chi2 < chi2_b + 1}
-     * and the second block, all sets for which
-     * {@code chi2 >= chi2_b + 1}
-     * where {@code chi2_b} is the lowest chi-square (corresponding to the
-     * best solution).
-     */
-    @Test
-    public void testParametersErrorMonteCarloParameters() {
-        // Error on the observations.
-        final double yError = 15;
-
-        // True values of the parameters.
-        final double slope = 123.456;
-        final double offset = -98.765;
-
-        // Samples generator.
-        final RandomStraightLinePointGenerator lineGenerator
-            = new RandomStraightLinePointGenerator(slope, offset,
-                                                   yError,
-                                                   -1e3, 1e4,
-                                                   13839013L);
-
-        // Number of observations.
-        final int numObs = 10;
-
-        // Create a single set of observations.
-        final Point2D.Double[] obs = lineGenerator.generate(numObs);
-
-        final StraightLineProblem problem = new StraightLineProblem(yError);
-        for (int i = 0; i < numObs; i++) {
-            final Point2D.Double p = obs[i];
-            problem.addPoint(p.x, p.y);
-        }
-
-        // Direct solution (using simple regression).
-        final double[] regress = problem.solve();
-
-        // Dummy optimizer (to compute the chi-square).
-        final AbstractLeastSquaresOptimizer optim = new DummyOptimizer();
-        // Get chi-square of the best parameters set for the given set of
-        // observations.
-        final double bestChi2N = getChi2N(optim, problem, regress);
-        final double[] sigma = optim.computeSigma(regress, 1e-14);
-
-        // Monte-Carlo (generates a grid of parameters).
-        final int mcRepeat = MONTE_CARLO_RUNS;
-        final int gridSize = (int) FastMath.sqrt(mcRepeat);
-
-        // Parameters evaluated at each grid point.
-        // Index 0 = slope
-        // Index 1 = offset
-        // Index 2 = normalized chi2
-        final List<double[]> paramsAndChi2 = new ArrayList<double[]>(gridSize * gridSize);
-
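-        // Grid spanning +/- 5 sigma around the true parameter values.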
-        final double slopeRange = 10 * sigma[0];
-        final double offsetRange = 10 * sigma[1];
-        final double minSlope = slope - 0.5 * slopeRange;
-        final double minOffset = offset - 0.5 * offsetRange;
-        final double deltaSlope = slopeRange / gridSize;
-        final double deltaOffset = offsetRange / gridSize;
-        for (int i = 0; i < gridSize; i++) {
-            final double s = minSlope + i * deltaSlope;
-            for (int j = 0; j < gridSize; j++) {
-                final double o = minOffset + j * deltaOffset;
-                final double chi2N = getChi2N(optim, problem, new double[] {s, o});
-
-                paramsAndChi2.add(new double[] {s, o, chi2N});
-            }
-        }
-
-        // Output (for use with "gnuplot").
-
-        // Some info.
-
-        // For plotting separately the parameter sets that have a large chi2.
-        final double chi2NPlusOne = bestChi2N + 1;
-        int numLarger = 0;
-
-        final String lineFmt = "%+.10e %+.10e   %.8e\n";
-
-        // Point with smallest chi-square.
-        System.out.printf(lineFmt, regress[0], regress[1], bestChi2N);
-        System.out.println(); // Empty line.
-
-        // Points within the confidence interval.
-        for (double[] d : paramsAndChi2) {
-            if (d[2] <= chi2NPlusOne) {
-                System.out.printf(lineFmt, d[0], d[1], d[2]);
-            }
-        }
-        System.out.println(); // Empty line.
-
-        // Points outside the confidence interval.
-        for (double[] d : paramsAndChi2) {
-            if (d[2] > chi2NPlusOne) {
-                ++numLarger;
-                System.out.printf(lineFmt, d[0], d[1], d[2]);
-            }
-        }
-        System.out.println(); // Empty line.
-
-        System.out.println("# sigma=" + Arrays.toString(sigma));
-        System.out.println("# " + numLarger + " sets filtered out");
-    }
-
-    /**
-     * @return the normalized chi-square.
-     */
-    private double getChi2N(AbstractLeastSquaresOptimizer optim,
-                            StraightLineProblem problem,
-                            double[] params) {
-        final double[] t = problem.target();
-        final double[] w = problem.weight();
-
-        optim.optimize(new MaxEval(Integer.MAX_VALUE),
-                       problem.getModelFunction(),
-                       problem.getModelFunctionJacobian(),
-                       new Target(t),
-                       new Weight(w),
-                       new InitialGuess(params));
-
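-        // Chi-square divided by the number of degrees of freedom.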
-        return optim.getChiSquare() / (t.length - params.length);
-    }
-}
-
-/**
- * A dummy optimizer.
- * Used for computing the covariance matrix.
- */
-@Deprecated
-class DummyOptimizer extends AbstractLeastSquaresOptimizer {
-    public DummyOptimizer() {
-        super(null);
-    }
-
-    /**
-     * Evaluates the model at the start point, sets the corresponding
-     * cost, and returns a dummy optimum (no actual optimization is
-     * performed).
-     */
-    @Override
-    public PointVectorValuePair doOptimize() {
-        final double[] params = getStartPoint();
-        final double[] res = computeResiduals(computeObjectiveValue(params));
-        setCost(computeCost(res));
-        return new PointVectorValuePair(params, null);
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleProblem.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleProblem.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleProblem.java
deleted file mode 100644
index 9458fe8..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleProblem.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.util.ArrayList;
-
-import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
-import org.apache.commons.math4.analysis.MultivariateVectorFunction;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunction;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunctionJacobian;
-import org.apache.commons.math4.util.FastMath;
-import org.apache.commons.math4.util.MathUtils;
-
-/**
- * Class that models a circle.
- * The parameters of the problem are:
- * <ul>
- *  <li>the x-coordinate of the circle center,</li>
- *  <li>the y-coordinate of the circle center,</li>
- *  <li>the radius of the circle.</li>
- * </ul>
- * The model functions are:
- * <ul>
- *  <li>for each triplet (cx, cy, r), the (x, y) coordinates of a point on the
- *   corresponding circle.</li>
- * </ul>
- */
-@Deprecated
-class CircleProblem {
-    /** Cloud of points assumed to be fitted by a circle. */
-    private final ArrayList<double[]> points;
-    /** Error on the x-coordinate of the points. */
-    private final double xSigma;
-    /** Error on the y-coordinate of the points. */
-    private final double ySigma;
-    /** Number of points on the circumference (when searching which
-        model point is closest to a given "observation"). */
-    private final int resolution;
-
-    /**
-     * @param xError Assumed error for the x-coordinate of the circle points.
-     * @param yError Assumed error for the y-coordinate of the circle points.
-     * @param searchResolution Number of points to try when searching the one
-     * that is closest to a given "observed" point.
-     */
-    public CircleProblem(double xError,
-                         double yError,
-                         int searchResolution) {
-        points = new ArrayList<double[]>();
-        xSigma = xError;
-        ySigma = yError;
-        resolution = searchResolution;
-    }
-
-    /**
-     * @param xError Assumed error for the x-coordinate of the circle points.
-     * @param yError Assumed error for the y-coordinate of the circle points.
-     */
-    public CircleProblem(double xError,
-                         double yError) {
-        this(xError, yError, 500);
-    }
-
-    public void addPoint(double px, double py) {
-        points.add(new double[] { px, py });
-    }
-
-    public double[] target() {
-        final double[] t = new double[points.size() * 2];
-        for (int i = 0; i < points.size(); i++) {
-            final double[] p = points.get(i);
-            final int index = i * 2;
-            t[index] = p[0];
-            t[index + 1] = p[1];
-        }
-
-        return t;
-    }
-
-    public double[] weight() {
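-        // Weights are the inverses of the coordinate error variances.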
-        final double wX = 1 / (xSigma * xSigma);
-        final double wY = 1 / (ySigma * ySigma);
-        final double[] w = new double[points.size() * 2];
-        for (int i = 0; i < points.size(); i++) {
-            final int index = i * 2;
-            w[index] = wX;
-            w[index + 1] = wY;
-        }
-
-        return w;
-    }
-
-    public ModelFunction getModelFunction() {
-        return new ModelFunction(new MultivariateVectorFunction() {
-                public double[] value(double[] params) {
-                    final double cx = params[0];
-                    final double cy = params[1];
-                    final double r = params[2];
-
-                    final double[] model = new double[points.size() * 2];
-
-                    final double deltaTheta = MathUtils.TWO_PI / resolution;
-                    for (int i = 0; i < points.size(); i++) {
-                        final double[] p = points.get(i);
-                        final double px = p[0];
-                        final double py = p[1];
-
-                        double bestX = 0;
-                        double bestY = 0;
-                        double dMin = Double.POSITIVE_INFINITY;
-
-                        // Find the angle for which the circle passes closest to the
-                        // current point (using "resolution" points along the
-                        // circumference).
-                        for (double theta = 0; theta <= MathUtils.TWO_PI; theta += deltaTheta) {
-                            final double currentX = cx + r * FastMath.cos(theta);
-                            final double currentY = cy + r * FastMath.sin(theta);
-                            final double dX = currentX - px;
-                            final double dY = currentY - py;
-                            final double d = dX * dX + dY * dY;
-                            if (d < dMin) {
-                                dMin = d;
-                                bestX = currentX;
-                                bestY = currentY;
-                            }
-                        }
-
-                        final int index = i * 2;
-                        model[index] = bestX;
-                        model[index + 1] = bestY;
-                    }
-
-                    return model;
-                }
-            });
-    }
-
-    public ModelFunctionJacobian getModelFunctionJacobian() {
-        return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
-                public double[][] value(double[] point) {
-                    return jacobian(point);
-                }
-        });
-    }
-
-    private double[][] jacobian(double[] params) {
-        final double[][] jacobian = new double[points.size() * 2][3];
-
-        for (int i = 0; i < points.size(); i++) {
-            final int index = i * 2;
-            // Partial derivative wrt x-coordinate of center. 
-            jacobian[index][0] = 1;
-            jacobian[index + 1][0] = 0;
-            // Partial derivative wrt y-coordinate of center.
-            jacobian[index][1] = 0;
-            jacobian[index + 1][1] = 1;
-            // Partial derivative wrt radius.
-            final double[] p = points.get(i);
-            jacobian[index][2] = (p[0] - params[0]) / params[2];
-            jacobian[index + 1][2] = (p[1] - params[1]) / params[2];
-        }
-
-        return jacobian;
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleVectorial.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleVectorial.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleVectorial.java
deleted file mode 100644
index 7b6a310..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/CircleVectorial.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.util.ArrayList;
-
-import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
-import org.apache.commons.math4.analysis.MultivariateVectorFunction;
-import org.apache.commons.math4.geometry.euclidean.twod.Vector2D;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunction;
-import org.apache.commons.math4.optim.nonlinear.vector.ModelFunctionJacobian;
-
-/**
- * Class used in the tests.
- */
-@Deprecated
-class CircleVectorial {
-    private ArrayList<Vector2D> points;
-
-    public CircleVectorial() {
-        points  = new ArrayList<Vector2D>();
-    }
-
-    public void addPoint(double px, double py) {
-        points.add(new Vector2D(px, py));
-    }
-
-    public int getN() {
-        return points.size();
-    }
-
-    public double getRadius(Vector2D center) {
-        double r = 0;
-        for (Vector2D point : points) {
-            r += point.distance(center);
-        }
-        return r / points.size();
-    }
-
-    public ModelFunction getModelFunction() {
-        return new ModelFunction(new MultivariateVectorFunction() {
-                public double[] value(double[] params) {
-                    Vector2D center = new Vector2D(params[0], params[1]);
-                    double radius = getRadius(center);
-                    double[] residuals = new double[points.size()];
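-                    // Residual for each point: its distance to the center minus the mean radius.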
-                    for (int i = 0; i < residuals.length; i++) {
-                        residuals[i] = points.get(i).distance(center) - radius;
-                    }
-
-                    return residuals;
-                }
-        });
-    }
-
-    public ModelFunctionJacobian getModelFunctionJacobian() {
-        return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
-                public double[][] value(double[] params) {
-                    final int n = points.size();
-                    final Vector2D center = new Vector2D(params[0], params[1]);
-
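-                    // Partial derivatives of the mean radius with respect to the center coordinates.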
-                    double dRdX = 0;
-                    double dRdY = 0;
-                    for (Vector2D pk : points) {
-                        double dk = pk.distance(center);
-                        dRdX += (center.getX() - pk.getX()) / dk;
-                        dRdY += (center.getY() - pk.getY()) / dk;
-                    }
-                    dRdX /= n;
-                    dRdY /= n;
-
-                    // Jacobian of the radius residuals.
-                    double[][] jacobian = new double[n][2];
-                    for (int i = 0; i < n; i++) {
-                        final Vector2D pi = points.get(i);
-                        final double di = pi.distance(center);
-                        jacobian[i][0] = (center.getX() - pi.getX()) / di - dRdX;
-                        jacobian[i][1] = (center.getY() - pi.getY()) / di - dRdY;
-                    }
-
-                    return jacobian;
-                }
-        });
-    }
-}

http://git-wip-us.apache.org/repos/asf/commons-math/blob/e31fde87/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/GaussNewtonOptimizerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/GaussNewtonOptimizerTest.java b/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/GaussNewtonOptimizerTest.java
deleted file mode 100644
index 7e73a9a..0000000
--- a/src/test/java/org/apache/commons/math4/optim/nonlinear/vector/jacobian/GaussNewtonOptimizerTest.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optim.nonlinear.vector.jacobian;
-
-import java.io.IOException;
-
-import org.apache.commons.math4.exception.ConvergenceException;
-import org.apache.commons.math4.exception.MathUnsupportedOperationException;
-import org.apache.commons.math4.exception.TooManyEvaluationsException;
-import org.apache.commons.math4.optim.InitialGuess;
-import org.apache.commons.math4.optim.MaxEval;
-import org.apache.commons.math4.optim.SimpleBounds;
-import org.apache.commons.math4.optim.SimpleVectorValueChecker;
-import org.apache.commons.math4.optim.nonlinear.vector.Target;
-import org.apache.commons.math4.optim.nonlinear.vector.Weight;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.AbstractLeastSquaresOptimizer;
-import org.apache.commons.math4.optim.nonlinear.vector.jacobian.GaussNewtonOptimizer;
-import org.junit.Test;
-
-/**
- * <p>Some of the unit tests are re-implementations of the MINPACK <a
- * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
- * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
- * The redistribution policy for MINPACK is available <a
- * href="http://www.netlib.org/minpack/disclaimer">here</a>, for
- * convenience, it is reproduced below.</p>
-
- * <table border="0" width="80%" cellpadding="10" align="center" bgcolor="#E0E0E0">
- * <tr><td>
- *    Minpack Copyright Notice (1999) University of Chicago.
- *    All rights reserved
- * </td></tr>
- * <tr><td>
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * <ol>
- *  <li>Redistributions of source code must retain the above copyright
- *      notice, this list of conditions and the following disclaimer.</li>
- * <li>Redistributions in binary form must reproduce the above
- *     copyright notice, this list of conditions and the following
- *     disclaimer in the documentation and/or other materials provided
- *     with the distribution.</li>
- * <li>The end-user documentation included with the redistribution, if any,
- *     must include the following acknowledgment:
- *     <code>This product includes software developed by the University of
- *           Chicago, as Operator of Argonne National Laboratory.</code>
- *     Alternately, this acknowledgment may appear in the software itself,
- *     if and wherever such third-party acknowledgments normally appear.</li>
- * <li><strong>WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS"
- *     WITHOUT WARRANTY OF ANY KIND. THE COPYRIGHT HOLDER, THE
- *     UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND
- *     THEIR EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR
- *     IMPLIED, INCLUDING BUT NOT LIMITED TO ANY IMPLIED WARRANTIES
- *     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE
- *     OR NON-INFRINGEMENT, (2) DO NOT ASSUME ANY LEGAL LIABILITY
- *     OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR
- *     USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF
- *     THE SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4)
- *     DO NOT WARRANT THAT THE SOFTWARE WILL FUNCTION
- *     UNINTERRUPTED, THAT IT IS ERROR-FREE OR THAT ANY ERRORS WILL
- *     BE CORRECTED.</strong></li>
- * <li><strong>LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT
- *     HOLDER, THE UNITED STATES, THE UNITED STATES DEPARTMENT OF
- *     ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT,
- *     INCIDENTAL, CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF
- *     ANY KIND OR NATURE, INCLUDING BUT NOT LIMITED TO LOSS OF
- *     PROFITS OR LOSS OF DATA, FOR ANY REASON WHATSOEVER, WHETHER
- *     SUCH LIABILITY IS ASSERTED ON THE BASIS OF CONTRACT, TORT
- *     (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR OTHERWISE,
- *     EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
- *     POSSIBILITY OF SUCH LOSS OR DAMAGES.</strong></li>
- * </ol></td></tr>
- * </table>
-
- * @author Argonne National Laboratory. MINPACK project. March 1980 (original fortran minpack tests)
- * @author Burton S. Garbow (original fortran minpack tests)
- * @author Kenneth E. Hillstrom (original fortran minpack tests)
- * @author Jorge J. More (original fortran minpack tests)
- * @author Luc Maisonobe (non-minpack tests and minpack tests Java translation)
- */
-@Deprecated
-public class GaussNewtonOptimizerTest
-    extends AbstractLeastSquaresOptimizerAbstractTest {
-
-    @Override
-    public AbstractLeastSquaresOptimizer createOptimizer() {
-        return new GaussNewtonOptimizer(new SimpleVectorValueChecker(1.0e-6, 1.0e-6));
-    }
-
-    @Test(expected=MathUnsupportedOperationException.class)
-    public void testConstraintsUnsupported() {
-        createOptimizer().optimize(new MaxEval(100),
-                                   new Target(new double[] { 2 }),
-                                   new Weight(new double[] { 1 }),
-                                   new InitialGuess(new double[] { 1, 2 }),
-                                   new SimpleBounds(new double[] { -10, 0 },
-                                                    new double[] { 20, 30 }));
-    }
-
-    @Override
-    @Test(expected = ConvergenceException.class)
-    public void testMoreEstimatedParametersSimple() {
-        /*
-         * Exception is expected with this optimizer
-         */
-        super.testMoreEstimatedParametersSimple();
-    }
-
-    @Override
-    @Test(expected=ConvergenceException.class)
-    public void testMoreEstimatedParametersUnsorted() {
-        /*
-         * Exception is expected with this optimizer
-         */
-        super.testMoreEstimatedParametersUnsorted();
-    }
-
-    @Test(expected=TooManyEvaluationsException.class)
-    public void testMaxEvaluations() throws Exception {
-        CircleVectorial circle = new CircleVectorial();
-        circle.addPoint( 30.0,  68.0);
-        circle.addPoint( 50.0,  -6.0);
-        circle.addPoint(110.0, -20.0);
-        circle.addPoint( 35.0,  15.0);
-        circle.addPoint( 45.0,  97.0);
-
-        GaussNewtonOptimizer optimizer
-            = new GaussNewtonOptimizer(new SimpleVectorValueChecker(1e-30, 1e-30));
-
-        optimizer.optimize(new MaxEval(100),
-                           circle.getModelFunction(),
-                           circle.getModelFunctionJacobian(),
-                           new Target(new double[] { 0, 0, 0, 0, 0 }),
-                           new Weight(new double[] { 1, 1, 1, 1, 1 }),
-                           new InitialGuess(new double[] { 98.680, 47.345 }));
-    }
-
-    @Override
-    @Test(expected=ConvergenceException.class)
-    public void testCircleFittingBadInit() {
-        /*
-         * This test does not converge with this optimizer.
-         */
-        super.testCircleFittingBadInit();
-    }
-
-    @Override
-    @Test(expected = ConvergenceException.class)
-    public void testHahn1()
-        throws IOException {
-        /*
-         * TODO This test leads to a singular problem with the Gauss-Newton
-         * optimizer. This should be investigated.
-         */
-        super.testHahn1();
-    }
-}