You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@commons.apache.org by tn...@apache.org on 2015/02/25 22:49:32 UTC
[04/18] [math] Remove deprecated optimization package.
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/NonLinearConjugateGradientOptimizerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/NonLinearConjugateGradientOptimizerTest.java b/src/test/java/org/apache/commons/math4/optimization/general/NonLinearConjugateGradientOptimizerTest.java
deleted file mode 100644
index d9000a8..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/NonLinearConjugateGradientOptimizerTest.java
+++ /dev/null
@@ -1,388 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optimization.general;
-
-import java.io.Serializable;
-
-import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
-import org.apache.commons.math4.analysis.differentiation.MultivariateDifferentiableFunction;
-import org.apache.commons.math4.analysis.solvers.BrentSolver;
-import org.apache.commons.math4.geometry.euclidean.twod.Vector2D;
-import org.apache.commons.math4.linear.BlockRealMatrix;
-import org.apache.commons.math4.linear.RealMatrix;
-import org.apache.commons.math4.optimization.GoalType;
-import org.apache.commons.math4.optimization.PointValuePair;
-import org.apache.commons.math4.optimization.SimpleValueChecker;
-import org.apache.commons.math4.optimization.general.ConjugateGradientFormula;
-import org.apache.commons.math4.optimization.general.NonLinearConjugateGradientOptimizer;
-import org.apache.commons.math4.optimization.general.Preconditioner;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * <p>Some of the unit tests are re-implementations of the MINPACK <a
- * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
- * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
- * The redistribution policy for MINPACK is available <a
- * href="http://www.netlib.org/minpack/disclaimer">here</a>, for
- * convenience, it is reproduced below.</p>
-
- * <table border="0" width="80%" cellpadding="10" align="center" bgcolor="#E0E0E0">
- * <tr><td>
- * Minpack Copyright Notice (1999) University of Chicago.
- * All rights reserved
- * </td></tr>
- * <tr><td>
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * <ol>
- * <li>Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.</li>
- * <li>Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following
- * disclaimer in the documentation and/or other materials provided
- * with the distribution.</li>
- * <li>The end-user documentation included with the redistribution, if any,
- * must include the following acknowledgment:
- * <code>This product includes software developed by the University of
- * Chicago, as Operator of Argonne National Laboratory.</code>
- * Alternately, this acknowledgment may appear in the software itself,
- * if and wherever such third-party acknowledgments normally appear.</li>
- * <li><strong>WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS"
- * WITHOUT WARRANTY OF ANY KIND. THE COPYRIGHT HOLDER, THE
- * UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND
- * THEIR EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO ANY IMPLIED WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE
- * OR NON-INFRINGEMENT, (2) DO NOT ASSUME ANY LEGAL LIABILITY
- * OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR
- * USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF
- * THE SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4)
- * DO NOT WARRANT THAT THE SOFTWARE WILL FUNCTION
- * UNINTERRUPTED, THAT IT IS ERROR-FREE OR THAT ANY ERRORS WILL
- * BE CORRECTED.</strong></li>
- * <li><strong>LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT
- * HOLDER, THE UNITED STATES, THE UNITED STATES DEPARTMENT OF
- * ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT,
- * INCIDENTAL, CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF
- * ANY KIND OR NATURE, INCLUDING BUT NOT LIMITED TO LOSS OF
- * PROFITS OR LOSS OF DATA, FOR ANY REASON WHATSOEVER, WHETHER
- * SUCH LIABILITY IS ASSERTED ON THE BASIS OF CONTRACT, TORT
- * (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR OTHERWISE,
- * EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
- * POSSIBILITY OF SUCH LOSS OR DAMAGES.</strong></li>
 - * </ol></td></tr>
- * </table>
-
- * @author Argonne National Laboratory. MINPACK project. March 1980 (original fortran minpack tests)
- * @author Burton S. Garbow (original fortran minpack tests)
- * @author Kenneth E. Hillstrom (original fortran minpack tests)
- * @author Jorge J. More (original fortran minpack tests)
- * @author Luc Maisonobe (non-minpack tests and minpack tests Java translation)
- */
-@Deprecated
-public class NonLinearConjugateGradientOptimizerTest {
- @Test
- public void testTrivial() {
- LinearProblem problem =
- new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0 });
- Assert.assertEquals(1.5, optimum.getPoint()[0], 1.0e-10);
- Assert.assertEquals(0.0, optimum.getValue(), 1.0e-10);
- }
-
- @Test
- public void testColumnsPermutation() {
- LinearProblem problem =
- new LinearProblem(new double[][] { { 1.0, -1.0 }, { 0.0, 2.0 }, { 1.0, -2.0 } },
- new double[] { 4.0, 6.0, 1.0 });
-
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0, 0 });
- Assert.assertEquals(7.0, optimum.getPoint()[0], 1.0e-10);
- Assert.assertEquals(3.0, optimum.getPoint()[1], 1.0e-10);
- Assert.assertEquals(0.0, optimum.getValue(), 1.0e-10);
-
- }
-
- @Test
- public void testNoDependency() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 2, 0, 0, 0, 0, 0 },
- { 0, 2, 0, 0, 0, 0 },
- { 0, 0, 2, 0, 0, 0 },
- { 0, 0, 0, 2, 0, 0 },
- { 0, 0, 0, 0, 2, 0 },
- { 0, 0, 0, 0, 0, 2 }
- }, new double[] { 0.0, 1.1, 2.2, 3.3, 4.4, 5.5 });
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0, 0, 0, 0, 0, 0 });
- for (int i = 0; i < problem.target.length; ++i) {
- Assert.assertEquals(0.55 * i, optimum.getPoint()[i], 1.0e-10);
- }
- }
-
- @Test
- public void testOneSet() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 1, 0, 0 },
- { -1, 1, 0 },
- { 0, -1, 1 }
- }, new double[] { 1, 1, 1});
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0, 0, 0 });
- Assert.assertEquals(1.0, optimum.getPoint()[0], 1.0e-10);
- Assert.assertEquals(2.0, optimum.getPoint()[1], 1.0e-10);
- Assert.assertEquals(3.0, optimum.getPoint()[2], 1.0e-10);
-
- }
-
- @Test
- public void testTwoSets() {
- final double epsilon = 1.0e-7;
- LinearProblem problem = new LinearProblem(new double[][] {
- { 2, 1, 0, 4, 0, 0 },
- { -4, -2, 3, -7, 0, 0 },
- { 4, 1, -2, 8, 0, 0 },
- { 0, -3, -12, -1, 0, 0 },
- { 0, 0, 0, 0, epsilon, 1 },
- { 0, 0, 0, 0, 1, 1 }
- }, new double[] { 2, -9, 2, 2, 1 + epsilon * epsilon, 2});
-
- final Preconditioner preconditioner
- = new Preconditioner() {
- public double[] precondition(double[] point, double[] r) {
- double[] d = r.clone();
- d[0] /= 72.0;
- d[1] /= 30.0;
- d[2] /= 314.0;
- d[3] /= 260.0;
- d[4] /= 2 * (1 + epsilon * epsilon);
- d[5] /= 4.0;
- return d;
- }
- };
-
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-13, 1e-13),
- new BrentSolver(),
- preconditioner);
-
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0, 0, 0, 0, 0, 0 });
- Assert.assertEquals( 3.0, optimum.getPoint()[0], 1.0e-10);
- Assert.assertEquals( 4.0, optimum.getPoint()[1], 1.0e-10);
- Assert.assertEquals(-1.0, optimum.getPoint()[2], 1.0e-10);
- Assert.assertEquals(-2.0, optimum.getPoint()[3], 1.0e-10);
- Assert.assertEquals( 1.0 + epsilon, optimum.getPoint()[4], 1.0e-10);
- Assert.assertEquals( 1.0 - epsilon, optimum.getPoint()[5], 1.0e-10);
-
- }
-
- @Test
- public void testNonInversible() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 1, 2, -3 },
- { 2, 1, 3 },
- { -3, 0, -9 }
- }, new double[] { 1, 1, 1 });
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 0, 0, 0 });
- Assert.assertTrue(optimum.getValue() > 0.5);
- }
-
- @Test
- public void testIllConditioned() {
- LinearProblem problem1 = new LinearProblem(new double[][] {
- { 10.0, 7.0, 8.0, 7.0 },
- { 7.0, 5.0, 6.0, 5.0 },
- { 8.0, 6.0, 10.0, 9.0 },
- { 7.0, 5.0, 9.0, 10.0 }
- }, new double[] { 32, 23, 33, 31 });
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-13, 1e-13),
- new BrentSolver(1e-15, 1e-15));
- PointValuePair optimum1 =
- optimizer.optimize(200, problem1, GoalType.MINIMIZE, new double[] { 0, 1, 2, 3 });
- Assert.assertEquals(1.0, optimum1.getPoint()[0], 1.0e-4);
- Assert.assertEquals(1.0, optimum1.getPoint()[1], 1.0e-4);
- Assert.assertEquals(1.0, optimum1.getPoint()[2], 1.0e-4);
- Assert.assertEquals(1.0, optimum1.getPoint()[3], 1.0e-4);
-
- LinearProblem problem2 = new LinearProblem(new double[][] {
- { 10.00, 7.00, 8.10, 7.20 },
- { 7.08, 5.04, 6.00, 5.00 },
- { 8.00, 5.98, 9.89, 9.00 },
- { 6.99, 4.99, 9.00, 9.98 }
- }, new double[] { 32, 23, 33, 31 });
- PointValuePair optimum2 =
- optimizer.optimize(200, problem2, GoalType.MINIMIZE, new double[] { 0, 1, 2, 3 });
- Assert.assertEquals(-81.0, optimum2.getPoint()[0], 1.0e-1);
- Assert.assertEquals(137.0, optimum2.getPoint()[1], 1.0e-1);
- Assert.assertEquals(-34.0, optimum2.getPoint()[2], 1.0e-1);
- Assert.assertEquals( 22.0, optimum2.getPoint()[3], 1.0e-1);
-
- }
-
- @Test
- public void testMoreEstimatedParametersSimple() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 3.0, 2.0, 0.0, 0.0 },
- { 0.0, 1.0, -1.0, 1.0 },
- { 2.0, 0.0, 1.0, 0.0 }
- }, new double[] { 7.0, 3.0, 5.0 });
-
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 7, 6, 5, 4 });
- Assert.assertEquals(0, optimum.getValue(), 1.0e-10);
-
- }
-
- @Test
- public void testMoreEstimatedParametersUnsorted() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 1.0, 1.0, 0.0, 0.0, 0.0, 0.0 },
- { 0.0, 0.0, 1.0, 1.0, 1.0, 0.0 },
- { 0.0, 0.0, 0.0, 0.0, 1.0, -1.0 },
- { 0.0, 0.0, -1.0, 1.0, 0.0, 1.0 },
- { 0.0, 0.0, 0.0, -1.0, 1.0, 0.0 }
- }, new double[] { 3.0, 12.0, -1.0, 7.0, 1.0 });
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 2, 2, 2, 2, 2, 2 });
- Assert.assertEquals(0, optimum.getValue(), 1.0e-10);
- }
-
- @Test
- public void testRedundantEquations() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 1.0, 1.0 },
- { 1.0, -1.0 },
- { 1.0, 3.0 }
- }, new double[] { 3.0, 1.0, 5.0 });
-
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 1, 1 });
- Assert.assertEquals(2.0, optimum.getPoint()[0], 1.0e-8);
- Assert.assertEquals(1.0, optimum.getPoint()[1], 1.0e-8);
-
- }
-
- @Test
- public void testInconsistentEquations() {
- LinearProblem problem = new LinearProblem(new double[][] {
- { 1.0, 1.0 },
- { 1.0, -1.0 },
- { 1.0, 3.0 }
- }, new double[] { 3.0, 1.0, 4.0 });
-
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-6, 1e-6));
- PointValuePair optimum =
- optimizer.optimize(100, problem, GoalType.MINIMIZE, new double[] { 1, 1 });
- Assert.assertTrue(optimum.getValue() > 0.1);
-
- }
-
- @Test
- public void testCircleFitting() {
- CircleScalar circle = new CircleScalar();
- circle.addPoint( 30.0, 68.0);
- circle.addPoint( 50.0, -6.0);
- circle.addPoint(110.0, -20.0);
- circle.addPoint( 35.0, 15.0);
- circle.addPoint( 45.0, 97.0);
- NonLinearConjugateGradientOptimizer optimizer =
- new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
- new SimpleValueChecker(1e-30, 1e-30),
- new BrentSolver(1e-15, 1e-13));
- PointValuePair optimum =
- optimizer.optimize(100, circle, GoalType.MINIMIZE, new double[] { 98.680, 47.345 });
- Vector2D center = new Vector2D(optimum.getPointRef()[0], optimum.getPointRef()[1]);
- Assert.assertEquals(69.960161753, circle.getRadius(center), 1.0e-8);
- Assert.assertEquals(96.075902096, center.getX(), 1.0e-8);
- Assert.assertEquals(48.135167894, center.getY(), 1.0e-8);
- }
-
- private static class LinearProblem implements MultivariateDifferentiableFunction, Serializable {
-
- private static final long serialVersionUID = 703247177355019415L;
- final RealMatrix factors;
- final double[] target;
- public LinearProblem(double[][] factors, double[] target) {
- this.factors = new BlockRealMatrix(factors);
- this.target = target;
- }
-
- public double value(double[] variables) {
- double[] y = factors.operate(variables);
- double sum = 0;
- for (int i = 0; i < y.length; ++i) {
- double ri = y[i] - target[i];
- sum += ri * ri;
- }
- return sum;
- }
-
- public DerivativeStructure value(DerivativeStructure[] variables) {
- DerivativeStructure[] y = new DerivativeStructure[factors.getRowDimension()];
- for (int i = 0; i < y.length; ++i) {
- y[i] = variables[0].getField().getZero();
- for (int j = 0; j < factors.getColumnDimension(); ++j) {
- y[i] = y[i].add(variables[j].multiply(factors.getEntry(i, j)));
- }
- }
-
- DerivativeStructure sum = variables[0].getField().getZero();
- for (int i = 0; i < y.length; ++i) {
- DerivativeStructure ri = y[i].subtract(target[i]);
- sum = sum.add(ri.multiply(ri));
- }
- return sum;
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/RandomCirclePointGenerator.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/RandomCirclePointGenerator.java b/src/test/java/org/apache/commons/math4/optimization/general/RandomCirclePointGenerator.java
deleted file mode 100644
index 07ace1f..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/RandomCirclePointGenerator.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optimization.general;
-
-import org.apache.commons.math4.distribution.NormalDistribution;
-import org.apache.commons.math4.distribution.RealDistribution;
-import org.apache.commons.math4.distribution.UniformRealDistribution;
-import org.apache.commons.math4.geometry.euclidean.twod.Vector2D;
-import org.apache.commons.math4.random.RandomGenerator;
-import org.apache.commons.math4.random.Well44497b;
-import org.apache.commons.math4.util.FastMath;
-import org.apache.commons.math4.util.MathUtils;
-
-/**
- * Factory for generating a cloud of points that approximate a circle.
- */
-@Deprecated
-public class RandomCirclePointGenerator {
- /** RNG for the x-coordinate of the center. */
- private final RealDistribution cX;
- /** RNG for the y-coordinate of the center. */
- private final RealDistribution cY;
- /** RNG for the parametric position of the point. */
- private final RealDistribution tP;
- /** Radius of the circle. */
- private final double radius;
-
- /**
- * @param x Abscissa of the circle center.
- * @param y Ordinate of the circle center.
- * @param radius Radius of the circle.
- * @param xSigma Error on the x-coordinate of the circumference points.
- * @param ySigma Error on the y-coordinate of the circumference points.
- * @param seed RNG seed.
- */
- public RandomCirclePointGenerator(double x,
- double y,
- double radius,
- double xSigma,
- double ySigma,
- long seed) {
- final RandomGenerator rng = new Well44497b(seed);
- this.radius = radius;
- cX = new NormalDistribution(rng, x, xSigma,
- NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
- cY = new NormalDistribution(rng, y, ySigma,
- NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
- tP = new UniformRealDistribution(rng, 0, MathUtils.TWO_PI);
- }
-
- /**
- * Point generator.
- *
- * @param n Number of points to create.
- * @return the cloud of {@code n} points.
- */
- public Vector2D[] generate(int n) {
- final Vector2D[] cloud = new Vector2D[n];
- for (int i = 0; i < n; i++) {
- cloud[i] = create();
- }
- return cloud;
- }
-
- /**
- * Create one point.
- *
- * @return a point.
- */
- private Vector2D create() {
- final double t = tP.sample();
- final double pX = cX.sample() + radius * FastMath.cos(t);
- final double pY = cY.sample() + radius * FastMath.sin(t);
-
- return new Vector2D(pX, pY);
- }
-}
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/RandomStraightLinePointGenerator.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/RandomStraightLinePointGenerator.java b/src/test/java/org/apache/commons/math4/optimization/general/RandomStraightLinePointGenerator.java
deleted file mode 100644
index e591962..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/RandomStraightLinePointGenerator.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optimization.general;
-
-import java.awt.geom.Point2D;
-
-import org.apache.commons.math4.distribution.NormalDistribution;
-import org.apache.commons.math4.distribution.RealDistribution;
-import org.apache.commons.math4.distribution.UniformRealDistribution;
-import org.apache.commons.math4.random.RandomGenerator;
-import org.apache.commons.math4.random.Well44497b;
-
-/**
- * Factory for generating a cloud of points that approximate a straight line.
- */
-@Deprecated
-public class RandomStraightLinePointGenerator {
- /** Slope. */
- private final double slope;
- /** Intercept. */
- private final double intercept;
- /** RNG for the x-coordinate. */
- private final RealDistribution x;
- /** RNG for the error on the y-coordinate. */
- private final RealDistribution error;
-
- /**
- * The generator will create a cloud of points whose x-coordinates
- * will be randomly sampled between {@code xLo} and {@code xHi}, and
- * the corresponding y-coordinates will be computed as
- * <pre><code>
- * y = a x + b + N(0, error)
- * </code></pre>
- * where {@code N(mean, sigma)} is a Gaussian distribution with the
- * given mean and standard deviation.
- *
- * @param a Slope.
- * @param b Intercept.
- * @param sigma Standard deviation on the y-coordinate of the point.
- * @param lo Lowest value of the x-coordinate.
- * @param hi Highest value of the x-coordinate.
- * @param seed RNG seed.
- */
- public RandomStraightLinePointGenerator(double a,
- double b,
- double sigma,
- double lo,
- double hi,
- long seed) {
- final RandomGenerator rng = new Well44497b(seed);
- slope = a;
- intercept = b;
- error = new NormalDistribution(rng, 0, sigma,
- NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
- x = new UniformRealDistribution(rng, lo, hi);
- }
-
- /**
- * Point generator.
- *
- * @param n Number of points to create.
- * @return the cloud of {@code n} points.
- */
- public Point2D.Double[] generate(int n) {
- final Point2D.Double[] cloud = new Point2D.Double[n];
- for (int i = 0; i < n; i++) {
- cloud[i] = create();
- }
- return cloud;
- }
-
- /**
- * Create one point.
- *
- * @return a point.
- */
- private Point2D.Double create() {
- final double abscissa = x.sample();
- final double yModel = slope * abscissa + intercept;
- final double ordinate = yModel + error.sample();
-
- return new Point2D.Double(abscissa, ordinate);
- }
-}
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDataset.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDataset.java b/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDataset.java
deleted file mode 100644
index 2b7f6ca..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDataset.java
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.math4.optimization.general;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
-import org.apache.commons.math4.analysis.differentiation.MultivariateDifferentiableVectorFunction;
-import org.apache.commons.math4.util.MathArrays;
-
-/**
- * This class gives access to the statistical reference datasets provided by the
- * NIST (available
- * <a href="http://www.itl.nist.gov/div898/strd/general/dataarchive.html">here</a>).
- * Instances of this class can be created by invocation of the
- * {@link StatisticalReferenceDatasetFactory}.
- */
-@Deprecated
-public abstract class StatisticalReferenceDataset {
-
- /** The name of this dataset. */
- private final String name;
-
- /** The total number of observations (data points). */
- private final int numObservations;
-
- /** The total number of parameters. */
- private final int numParameters;
-
- /** The total number of starting points for the optimizations. */
- private final int numStartingPoints;
-
- /** The values of the predictor. */
- private final double[] x;
-
- /** The values of the response. */
- private final double[] y;
-
- /**
- * The starting values. {@code startingValues[j][i]} is the value of the
- * {@code i}-th parameter in the {@code j}-th set of starting values.
- */
- private final double[][] startingValues;
-
- /** The certified values of the parameters. */
- private final double[] a;
-
- /** The certified values of the standard deviation of the parameters. */
- private final double[] sigA;
-
- /** The certified value of the residual sum of squares. */
- private double residualSumOfSquares;
-
- /** The least-squares problem. */
- private final MultivariateDifferentiableVectorFunction problem;
-
- /**
- * Creates a new instance of this class from the specified data file. The
- * file must follow the StRD format.
- *
- * @param in the data file
- * @throws IOException if an I/O error occurs
- */
- public StatisticalReferenceDataset(final BufferedReader in)
- throws IOException {
-
- final ArrayList<String> lines = new ArrayList<String>();
- for (String line = in.readLine(); line != null; line = in.readLine()) {
- lines.add(line);
- }
- int[] index = findLineNumbers("Data", lines);
- if (index == null) {
- throw new AssertionError("could not find line indices for data");
- }
- this.numObservations = index[1] - index[0] + 1;
- this.x = new double[this.numObservations];
- this.y = new double[this.numObservations];
- for (int i = 0; i < this.numObservations; i++) {
- final String line = lines.get(index[0] + i - 1);
- final String[] tokens = line.trim().split(" ++");
- // Data columns are in reverse order!!!
- this.y[i] = Double.parseDouble(tokens[0]);
- this.x[i] = Double.parseDouble(tokens[1]);
- }
-
- index = findLineNumbers("Starting Values", lines);
- if (index == null) {
- throw new AssertionError(
- "could not find line indices for starting values");
- }
- this.numParameters = index[1] - index[0] + 1;
-
- double[][] start = null;
- this.a = new double[numParameters];
- this.sigA = new double[numParameters];
- for (int i = 0; i < numParameters; i++) {
- final String line = lines.get(index[0] + i - 1);
- final String[] tokens = line.trim().split(" ++");
- if (start == null) {
- start = new double[tokens.length - 4][numParameters];
- }
- for (int j = 2; j < tokens.length - 2; j++) {
- start[j - 2][i] = Double.parseDouble(tokens[j]);
- }
- this.a[i] = Double.parseDouble(tokens[tokens.length - 2]);
- this.sigA[i] = Double.parseDouble(tokens[tokens.length - 1]);
- }
- if (start == null) {
- throw new IOException("could not find starting values");
- }
- this.numStartingPoints = start.length;
- this.startingValues = start;
-
- double dummyDouble = Double.NaN;
- String dummyString = null;
- for (String line : lines) {
- if (line.contains("Dataset Name:")) {
- dummyString = line
- .substring(line.indexOf("Dataset Name:") + 13,
- line.indexOf("(")).trim();
- }
- if (line.contains("Residual Sum of Squares")) {
- final String[] tokens = line.split(" ++");
- dummyDouble = Double.parseDouble(tokens[4].trim());
- }
- }
- if (Double.isNaN(dummyDouble)) {
- throw new IOException(
- "could not find certified value of residual sum of squares");
- }
- this.residualSumOfSquares = dummyDouble;
-
- if (dummyString == null) {
- throw new IOException("could not find dataset name");
- }
- this.name = dummyString;
-
- this.problem = new MultivariateDifferentiableVectorFunction() {
-
- public double[] value(final double[] a) {
- DerivativeStructure[] dsA = new DerivativeStructure[a.length];
- for (int i = 0; i < a.length; ++i) {
- dsA[i] = new DerivativeStructure(a.length, 0, a[i]);
- }
- final int n = getNumObservations();
- final double[] yhat = new double[n];
- for (int i = 0; i < n; i++) {
- yhat[i] = getModelValue(getX(i), dsA).getValue();
- }
- return yhat;
- }
-
- public DerivativeStructure[] value(final DerivativeStructure[] a) {
- final int n = getNumObservations();
- final DerivativeStructure[] yhat = new DerivativeStructure[n];
- for (int i = 0; i < n; i++) {
- yhat[i] = getModelValue(getX(i), a);
- }
- return yhat;
- }
-
- };
- }
-
- /**
- * Returns the name of this dataset.
- *
- * @return the name of the dataset
- */
- public String getName() {
- return name;
- }
-
- /**
- * Returns the total number of observations (data points).
- *
- * @return the number of observations
- */
- public int getNumObservations() {
- return numObservations;
- }
-
- /**
- * Returns a copy of the data arrays. The data is laid out as follows <li>
- * {@code data[0][i] = x[i]},</li> <li>{@code data[1][i] = y[i]},</li>
- *
- * @return the array of data points.
- */
- public double[][] getData() {
- return new double[][] {
- MathArrays.copyOf(x), MathArrays.copyOf(y)
- };
- }
-
- /**
- * Returns the x-value of the {@code i}-th data point.
- *
- * @param i the index of the data point
- * @return the x-value
- */
- public double getX(final int i) {
- return x[i];
- }
-
- /**
- * Returns the y-value of the {@code i}-th data point.
- *
- * @param i the index of the data point
- * @return the y-value
- */
- public double getY(final int i) {
- return y[i];
- }
-
- /**
- * Returns the total number of parameters.
- *
- * @return the number of parameters
- */
- public int getNumParameters() {
- return numParameters;
- }
-
- /**
 - * Returns the certified values of the parameters.
- *
- * @return the values of the parameters
- */
- public double[] getParameters() {
- return MathArrays.copyOf(a);
- }
-
- /**
- * Returns the certified value of the {@code i}-th parameter.
- *
- * @param i the index of the parameter
- * @return the value of the parameter
- */
- public double getParameter(final int i) {
- return a[i];
- }
-
- /**
 - * Returns the certified values of the standard deviations of the parameters.
- *
- * @return the standard deviations of the parameters
- */
- public double[] getParametersStandardDeviations() {
- return MathArrays.copyOf(sigA);
- }
-
- /**
- * Returns the certified value of the standard deviation of the {@code i}-th
- * parameter.
- *
- * @param i the index of the parameter
- * @return the standard deviation of the parameter
- */
- public double getParameterStandardDeviation(final int i) {
- return sigA[i];
- }
-
- /**
- * Returns the certified value of the residual sum of squares.
- *
- * @return the residual sum of squares
- */
- public double getResidualSumOfSquares() {
- return residualSumOfSquares;
- }
-
- /**
- * Returns the total number of starting points (initial guesses for the
- * optimization process).
- *
- * @return the number of starting points
- */
- public int getNumStartingPoints() {
- return numStartingPoints;
- }
-
- /**
- * Returns the {@code i}-th set of initial values of the parameters.
- *
- * @param i the index of the starting point
- * @return the starting point
- */
- public double[] getStartingPoint(final int i) {
- return MathArrays.copyOf(startingValues[i]);
- }
-
- /**
- * Returns the least-squares problem corresponding to fitting the model to
- * the specified data.
- *
- * @return the least-squares problem
- */
- public MultivariateDifferentiableVectorFunction getLeastSquaresProblem() {
- return problem;
- }
-
- /**
- * Returns the value of the model for the specified values of the predictor
- * variable and the parameters.
- *
- * @param x the predictor variable
- * @param a the parameters
- * @return the value of the model
- */
- public abstract DerivativeStructure getModelValue(final double x, final DerivativeStructure[] a);
-
- /**
- * <p>
- * Parses the specified text lines, and extracts the indices of the first
- * and last lines of the data defined by the specified {@code key}. This key
- * must be one of
- * </p>
- * <ul>
- * <li>{@code "Starting Values"},</li>
- * <li>{@code "Certified Values"},</li>
- * <li>{@code "Data"}.</li>
- * </ul>
- * <p>
- * In the NIST data files, the line indices are separated by the keywords
- * {@code "lines"} and {@code "to"}.
- * </p>
- *
- * @param lines the line of text to be parsed
- * @return an array of two {@code int}s. First value is the index of the
- * first line, second value is the index of the last line.
- * {@code null} if the line could not be parsed.
- */
- private static int[] findLineNumbers(final String key,
- final Iterable<String> lines) {
- for (String text : lines) {
- boolean flag = text.contains(key) && text.contains("lines") &&
- text.contains("to") && text.contains(")");
- if (flag) {
- final int[] numbers = new int[2];
- final String from = text.substring(text.indexOf("lines") + 5,
- text.indexOf("to"));
- numbers[0] = Integer.parseInt(from.trim());
- final String to = text.substring(text.indexOf("to") + 2,
- text.indexOf(")"));
- numbers[1] = Integer.parseInt(to.trim());
- return numbers;
- }
- }
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDatasetFactory.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDatasetFactory.java b/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDatasetFactory.java
deleted file mode 100644
index f7fa021..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/StatisticalReferenceDatasetFactory.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.math4.optimization.general;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-
-import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
-
-/**
- * A factory to create instances of {@link StatisticalReferenceDataset} from
- * available resources.
- */
-@Deprecated
-public class StatisticalReferenceDatasetFactory {
-
- private StatisticalReferenceDatasetFactory() {
- // Do nothing
- }
-
- /**
- * Creates a new buffered reader from the specified resource name.
- *
- * @param name the name of the resource
- * @return a buffered reader
- * @throws IOException if an I/O error occurred
- */
- public static BufferedReader createBufferedReaderFromResource(final String name)
- throws IOException {
- final InputStream resourceAsStream;
- resourceAsStream = StatisticalReferenceDatasetFactory.class
- .getResourceAsStream(name);
- if (resourceAsStream == null) {
- throw new IOException("could not find resource " + name);
- }
- return new BufferedReader(new InputStreamReader(resourceAsStream));
- }
-
- public static StatisticalReferenceDataset createKirby2()
- throws IOException {
- final BufferedReader in = createBufferedReaderFromResource("Kirby2.dat");
- StatisticalReferenceDataset dataset = null;
- try {
- dataset = new StatisticalReferenceDataset(in) {
-
- @Override
- public DerivativeStructure getModelValue(final double x, final DerivativeStructure[] a) {
- final DerivativeStructure p = a[0].add(a[1].add(a[2].multiply(x)).multiply(x));
- final DerivativeStructure q = a[3].add(a[4].multiply(x)).multiply(x).add(1.0);
- return p.divide(q);
- }
-
- };
- } finally {
- in.close();
- }
- return dataset;
- }
-
- public static StatisticalReferenceDataset createHahn1()
- throws IOException {
- final BufferedReader in = createBufferedReaderFromResource("Hahn1.dat");
- StatisticalReferenceDataset dataset = null;
- try {
- dataset = new StatisticalReferenceDataset(in) {
-
- @Override
- public DerivativeStructure getModelValue(final double x, final DerivativeStructure[] a) {
- final DerivativeStructure p = a[0].add(a[1].add(a[2].add(a[3].multiply(x)).multiply(x)).multiply(x));
- final DerivativeStructure q = a[4].add(a[5].add(a[6].multiply(x)).multiply(x)).multiply(x).add(1.0);
- return p.divide(q);
- }
-
- };
- } finally {
- in.close();
- }
- return dataset;
- }
-
- public static StatisticalReferenceDataset createMGH17()
- throws IOException {
- final BufferedReader in = createBufferedReaderFromResource("MGH17.dat");
- StatisticalReferenceDataset dataset = null;
- try {
- dataset = new StatisticalReferenceDataset(in) {
-
- @Override
- public DerivativeStructure getModelValue(final double x, final DerivativeStructure[] a) {
- return a[0].add(a[1].multiply(a[3].multiply(-x).exp())).add(a[2].multiply(a[4].multiply(-x).exp()));
- }
-
- };
- } finally {
- in.close();
- }
- return dataset;
- }
-
- public static StatisticalReferenceDataset createLanczos1()
- throws IOException {
- final BufferedReader in =
- createBufferedReaderFromResource("Lanczos1.dat");
- StatisticalReferenceDataset dataset = null;
- try {
- dataset = new StatisticalReferenceDataset(in) {
-
- @Override
- public DerivativeStructure getModelValue(final double x, final DerivativeStructure[] a) {
- return a[0].multiply(a[3].multiply(-x).exp()).add(
- a[1].multiply(a[4].multiply(-x).exp())).add(
- a[2].multiply(a[5].multiply(-x).exp()));
- }
-
- };
- } finally {
- in.close();
- }
- return dataset;
- }
-
- /**
- * Returns an array with all available reference datasets.
- *
- * @return the array of datasets
- * @throws IOException if an I/O error occurs
- */
- public StatisticalReferenceDataset[] createAll()
- throws IOException {
- return new StatisticalReferenceDataset[] {
- createKirby2(), createMGH17()
- };
- }
-}
http://git-wip-us.apache.org/repos/asf/commons-math/blob/b4669aad/src/test/java/org/apache/commons/math4/optimization/general/StraightLineProblem.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/math4/optimization/general/StraightLineProblem.java b/src/test/java/org/apache/commons/math4/optimization/general/StraightLineProblem.java
deleted file mode 100644
index a81da4c..0000000
--- a/src/test/java/org/apache/commons/math4/optimization/general/StraightLineProblem.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.commons.math4.optimization.general;
-
-import java.util.ArrayList;
-
-import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
-import org.apache.commons.math4.analysis.differentiation.MultivariateDifferentiableVectorFunction;
-import org.apache.commons.math4.analysis.differentiation.UnivariateDifferentiableFunction;
-import org.apache.commons.math4.stat.regression.SimpleRegression;
-
-/**
- * Class that models a straight line defined as {@code y = a x + b}.
- * The parameters of problem are:
- * <ul>
- * <li>{@code a}</li>
- * <li>{@code b}</li>
- * </ul>
- * The model functions are:
- * <ul>
- * <li>for each pair (a, b), the y-coordinate of the line.</li>
- * </ul>
- */
-@Deprecated
-class StraightLineProblem implements MultivariateDifferentiableVectorFunction {
- /** Cloud of points assumed to be fitted by a straight line. */
- private final ArrayList<double[]> points;
- /** Error (on the y-coordinate of the points). */
- private final double sigma;
-
- /**
- * @param error Assumed error for the y-coordinate.
- */
- public StraightLineProblem(double error) {
- points = new ArrayList<double[]>();
- sigma = error;
- }
-
- public void addPoint(double px, double py) {
- points.add(new double[] { px, py });
- }
-
- /**
- * @return the list of x-coordinates.
- */
- public double[] x() {
- final double[] v = new double[points.size()];
- for (int i = 0; i < points.size(); i++) {
- final double[] p = points.get(i);
- v[i] = p[0]; // x-coordinate.
- }
-
- return v;
- }
-
- /**
- * @return the list of y-coordinates.
- */
- public double[] y() {
- final double[] v = new double[points.size()];
- for (int i = 0; i < points.size(); i++) {
- final double[] p = points.get(i);
- v[i] = p[1]; // y-coordinate.
- }
-
- return v;
- }
-
- public double[] target() {
- return y();
- }
-
- public double[] weight() {
- final double weight = 1 / (sigma * sigma);
- final double[] w = new double[points.size()];
- for (int i = 0; i < points.size(); i++) {
- w[i] = weight;
- }
-
- return w;
- }
-
- public double[] value(double[] params) {
- final Model line = new Model(new DerivativeStructure(0, 0, params[0]),
- new DerivativeStructure(0, 0, params[1]));
-
- final double[] model = new double[points.size()];
- for (int i = 0; i < points.size(); i++) {
- final double[] p = points.get(i);
- model[i] = line.value(p[0]);
- }
-
- return model;
- }
-
- public DerivativeStructure[] value(DerivativeStructure[] params) {
- final Model line = new Model(params[0], params[1]);
-
- final DerivativeStructure[] model = new DerivativeStructure[points.size()];
- for (int i = 0; i < points.size(); i++) {
- final DerivativeStructure p0 = params[0].getField().getZero().add(points.get(i)[0]);
- model[i] = line.value(p0);
- }
-
- return model;
- }
-
- /**
- * Directly solve the linear problem, using the {@link SimpleRegression}
- * class.
- */
- public double[] solve() {
- final SimpleRegression regress = new SimpleRegression(true);
- for (double[] d : points) {
- regress.addData(d[0], d[1]);
- }
-
- final double[] result = { regress.getSlope(), regress.getIntercept() };
- return result;
- }
-
- /**
- * Linear function.
- */
- public static class Model implements UnivariateDifferentiableFunction {
- final DerivativeStructure a;
- final DerivativeStructure b;
-
- public Model(DerivativeStructure a,
- DerivativeStructure b) {
- this.a = a;
- this.b = b;
- }
-
- public double value(double x) {
- return a.getValue() * x + b.getValue();
- }
-
- public DerivativeStructure value(DerivativeStructure x) {
- return x.multiply(a).add(b);
- }
-
- }
-}