You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@commons.apache.org by er...@apache.org on 2012/12/12 15:11:04 UTC

svn commit: r1420684 [15/15] - in /commons/proper/math/trunk/src: main/java/org/apache/commons/math3/exception/ main/java/org/apache/commons/math3/exception/util/ main/java/org/apache/commons/math3/fitting/ main/java/org/apache/commons/math3/optim/ mai...

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDataset.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDataset.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDataset.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDataset.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,383 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.nonlinear.vector.jacobian;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import org.apache.commons.math3.analysis.MultivariateVectorFunction;
+import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
+import org.apache.commons.math3.optim.nonlinear.vector.ModelFunction;
+import org.apache.commons.math3.optim.nonlinear.vector.ModelFunctionJacobian;
+import org.apache.commons.math3.util.MathArrays;
+
+/**
+ * This class gives access to the statistical reference datasets provided by the
+ * NIST (available
+ * <a href="http://www.itl.nist.gov/div898/strd/general/dataarchive.html">here</a>).
+ * Instances of this class can be created by invocation of the
+ * {@link StatisticalReferenceDatasetFactory}.
+ */
+public abstract class StatisticalReferenceDataset {
+
+    /** The name of this dataset. */
+    private final String name;
+
+    /** The total number of observations (data points). */
+    private final int numObservations;
+
+    /** The total number of parameters. */
+    private final int numParameters;
+
+    /** The total number of starting points for the optimizations. */
+    private final int numStartingPoints;
+
+    /** The values of the predictor. */
+    private final double[] x;
+
+    /** The values of the response. */
+    private final double[] y;
+
+    /**
+     * The starting values. {@code startingValues[j][i]} is the value of the
+     * {@code i}-th parameter in the {@code j}-th set of starting values.
+     */
+    private final double[][] startingValues;
+
+    /** The certified values of the parameters. */
+    private final double[] a;
+
+    /** The certified values of the standard deviation of the parameters. */
+    private final double[] sigA;
+
+    /** The certified value of the residual sum of squares. */
+    private final double residualSumOfSquares;
+
+    /** The least-squares problem. */
+    private final LeastSquaresProblem problem;
+
+    /**
+     * Creates a new instance of this class from the specified data file. The
+     * file must follow the StRD format.
+     *
+     * @param in the data file
+     * @throws IOException if an I/O error occurs, or if the file does not
+     * follow the expected StRD layout
+     */
+    public StatisticalReferenceDataset(final BufferedReader in)
+        throws IOException {
+
+        final ArrayList<String> lines = new ArrayList<String>();
+        for (String line = in.readLine(); line != null; line = in.readLine()) {
+            lines.add(line);
+        }
+        int[] index = findLineNumbers("Data", lines);
+        if (index == null) {
+            // A malformed input file is an I/O problem, reported consistently
+            // with the other parsing failures below (was an AssertionError).
+            throw new IOException("could not find line indices for data");
+        }
+        this.numObservations = index[1] - index[0] + 1;
+        this.x = new double[this.numObservations];
+        this.y = new double[this.numObservations];
+        for (int i = 0; i < this.numObservations; i++) {
+            // Line numbers in the StRD header are 1-based, hence the "- 1".
+            final String line = lines.get(index[0] + i - 1);
+            final String[] tokens = line.trim().split(" ++");
+            // Data columns are in reverse order!!!
+            this.y[i] = Double.parseDouble(tokens[0]);
+            this.x[i] = Double.parseDouble(tokens[1]);
+        }
+
+        index = findLineNumbers("Starting Values", lines);
+        if (index == null) {
+            throw new IOException(
+                                  "could not find line indices for starting values");
+        }
+        this.numParameters = index[1] - index[0] + 1;
+
+        double[][] start = null;
+        this.a = new double[numParameters];
+        this.sigA = new double[numParameters];
+        for (int i = 0; i < numParameters; i++) {
+            final String line = lines.get(index[0] + i - 1);
+            final String[] tokens = line.trim().split(" ++");
+            if (start == null) {
+                // Token layout per line: name, "=", the starting values, the
+                // certified value, the certified standard deviation.
+                start = new double[tokens.length - 4][numParameters];
+            }
+            for (int j = 2; j < tokens.length - 2; j++) {
+                start[j - 2][i] = Double.parseDouble(tokens[j]);
+            }
+            this.a[i] = Double.parseDouble(tokens[tokens.length - 2]);
+            this.sigA[i] = Double.parseDouble(tokens[tokens.length - 1]);
+        }
+        if (start == null) {
+            throw new IOException("could not find starting values");
+        }
+        this.numStartingPoints = start.length;
+        this.startingValues = start;
+
+        double dummyDouble = Double.NaN;
+        String dummyString = null;
+        for (String line : lines) {
+            if (line.contains("Dataset Name:")) {
+                dummyString = line
+                    .substring(line.indexOf("Dataset Name:") + 13,
+                               line.indexOf("(")).trim();
+            }
+            if (line.contains("Residual Sum of Squares")) {
+                final String[] tokens = line.split(" ++");
+                dummyDouble = Double.parseDouble(tokens[4].trim());
+            }
+        }
+        if (Double.isNaN(dummyDouble)) {
+            throw new IOException(
+                                  "could not find certified value of residual sum of squares");
+        }
+        this.residualSumOfSquares = dummyDouble;
+
+        if (dummyString == null) {
+            throw new IOException("could not find dataset name");
+        }
+        this.name = dummyString;
+
+        this.problem = new LeastSquaresProblem();
+    }
+
+    /** The least-squares problem corresponding to this dataset. */
+    class LeastSquaresProblem {
+        /**
+         * Returns the model function: the value of the model at each of the
+         * observed x-values, for the given parameters.
+         *
+         * @return the model function
+         */
+        public ModelFunction getModelFunction() {
+            return new ModelFunction(new MultivariateVectorFunction() {
+                    public double[] value(final double[] a) {
+                        final int n = getNumObservations();
+                        final double[] yhat = new double[n];
+                        for (int i = 0; i < n; i++) {
+                            yhat[i] = getModelValue(getX(i), a);
+                        }
+                        return yhat;
+                    }
+                });
+        }
+
+        /**
+         * Returns the Jacobian of the model function: one row per
+         * observation, one column per parameter.
+         *
+         * @return the model function Jacobian
+         */
+        public ModelFunctionJacobian getModelFunctionJacobian() {
+            return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
+                    public double[][] value(final double[] a)
+                        throws IllegalArgumentException {
+                        final int n = getNumObservations();
+                        final double[][] j = new double[n][];
+                        for (int i = 0; i < n; i++) {
+                            j[i] = getModelDerivatives(getX(i), a);
+                        }
+                        return j;
+                    }
+                });
+        }
+    }
+
+    /**
+     * Returns the name of this dataset.
+     *
+     * @return the name of the dataset
+     */
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Returns the total number of observations (data points).
+     *
+     * @return the number of observations
+     */
+    public int getNumObservations() {
+        return numObservations;
+    }
+
+    /**
+     * Returns a copy of the data arrays. The data is laid out as follows
+     * <ul>
+     * <li>{@code data[0][i] = x[i]},</li>
+     * <li>{@code data[1][i] = y[i]}.</li>
+     * </ul>
+     *
+     * @return the array of data points.
+     */
+    public double[][] getData() {
+        return new double[][] {
+            MathArrays.copyOf(x), MathArrays.copyOf(y)
+        };
+    }
+
+    /**
+     * Returns the x-value of the {@code i}-th data point.
+     *
+     * @param i the index of the data point
+     * @return the x-value
+     */
+    public double getX(final int i) {
+        return x[i];
+    }
+
+    /**
+     * Returns the y-value of the {@code i}-th data point.
+     *
+     * @param i the index of the data point
+     * @return the y-value
+     */
+    public double getY(final int i) {
+        return y[i];
+    }
+
+    /**
+     * Returns the total number of parameters.
+     *
+     * @return the number of parameters
+     */
+    public int getNumParameters() {
+        return numParameters;
+    }
+
+    /**
+     * Returns the certified values of the parameters.
+     *
+     * @return the values of the parameters
+     */
+    public double[] getParameters() {
+        return MathArrays.copyOf(a);
+    }
+
+    /**
+     * Returns the certified value of the {@code i}-th parameter.
+     *
+     * @param i the index of the parameter
+     * @return the value of the parameter
+     */
+    public double getParameter(final int i) {
+        return a[i];
+    }
+
+    /**
+     * Returns the certified values of the standard deviations of the parameters.
+     *
+     * @return the standard deviations of the parameters
+     */
+    public double[] getParametersStandardDeviations() {
+        return MathArrays.copyOf(sigA);
+    }
+
+    /**
+     * Returns the certified value of the standard deviation of the {@code i}-th
+     * parameter.
+     *
+     * @param i the index of the parameter
+     * @return the standard deviation of the parameter
+     */
+    public double getParameterStandardDeviation(final int i) {
+        return sigA[i];
+    }
+
+    /**
+     * Returns the certified value of the residual sum of squares.
+     *
+     * @return the residual sum of squares
+     */
+    public double getResidualSumOfSquares() {
+        return residualSumOfSquares;
+    }
+
+    /**
+     * Returns the total number of starting points (initial guesses for the
+     * optimization process).
+     *
+     * @return the number of starting points
+     */
+    public int getNumStartingPoints() {
+        return numStartingPoints;
+    }
+
+    /**
+     * Returns the {@code i}-th set of initial values of the parameters.
+     *
+     * @param i the index of the starting point
+     * @return the starting point
+     */
+    public double[] getStartingPoint(final int i) {
+        return MathArrays.copyOf(startingValues[i]);
+    }
+
+    /**
+     * Returns the least-squares problem corresponding to fitting the model to
+     * the specified data.
+     *
+     * @return the least-squares problem
+     */
+    public LeastSquaresProblem getLeastSquaresProblem() {
+        return problem;
+    }
+
+    /**
+     * Returns the value of the model for the specified values of the predictor
+     * variable and the parameters.
+     *
+     * @param x the predictor variable
+     * @param a the parameters
+     * @return the value of the model
+     */
+    public abstract double getModelValue(final double x, final double[] a);
+
+    /**
+     * Returns the values of the partial derivatives of the model with respect
+     * to the parameters.
+     *
+     * @param x the predictor variable
+     * @param a the parameters
+     * @return the partial derivatives
+     */
+    public abstract double[] getModelDerivatives(final double x,
+                                                 final double[] a);
+
+    /**
+     * <p>
+     * Parses the specified text lines, and extracts the indices of the first
+     * and last lines of the data defined by the specified {@code key}. This key
+     * must be one of
+     * </p>
+     * <ul>
+     * <li>{@code "Starting Values"},</li>
+     * <li>{@code "Certified Values"},</li>
+     * <li>{@code "Data"}.</li>
+     * </ul>
+     * <p>
+     * In the NIST data files, the line indices are separated by the keywords
+     * {@code "lines"} and {@code "to"}.
+     * </p>
+     *
+     * @param key the key identifying the section to be located
+     * @param lines the lines of text to be parsed
+     * @return an array of two {@code int}s. First value is the index of the
+     *         first line, second value is the index of the last line.
+     *         {@code null} if the line could not be parsed.
+     */
+    private static int[] findLineNumbers(final String key,
+                                         final Iterable<String> lines) {
+        for (String text : lines) {
+            boolean flag = text.contains(key) && text.contains("lines") &&
+                           text.contains("to") && text.contains(")");
+            if (flag) {
+                final int[] numbers = new int[2];
+                final String from = text.substring(text.indexOf("lines") + 5,
+                                                   text.indexOf("to"));
+                numbers[0] = Integer.parseInt(from.trim());
+                final String to = text.substring(text.indexOf("to") + 2,
+                                                 text.indexOf(")"));
+                numbers[1] = Integer.parseInt(to.trim());
+                return numbers;
+            }
+        }
+        return null;
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDataset.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDatasetFactory.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDatasetFactory.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDatasetFactory.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDatasetFactory.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,201 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.nonlinear.vector.jacobian;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import org.apache.commons.math3.util.FastMath;
+
+/**
+ * A factory to create instances of {@link StatisticalReferenceDataset} from
+ * available resources.
+ */
+public final class StatisticalReferenceDatasetFactory {
+
+    /** Utility class: not meant to be instantiated. */
+    private StatisticalReferenceDatasetFactory() {
+        // Do nothing
+    }
+
+    /**
+     * Creates a new buffered reader from the specified resource name.
+     *
+     * @param name the name of the resource
+     * @return a buffered reader
+     * @throws IOException if an I/O error occurred or the resource could not
+     * be found
+     */
+    public static BufferedReader createBufferedReaderFromResource(final String name)
+        throws IOException {
+        final InputStream resourceAsStream;
+        resourceAsStream = StatisticalReferenceDatasetFactory.class
+            .getResourceAsStream(name);
+        if (resourceAsStream == null) {
+            throw new IOException("could not find resource " + name);
+        }
+        return new BufferedReader(new InputStreamReader(resourceAsStream));
+    }
+
+    /**
+     * Creates the NIST "Kirby2" dataset (rational model, five parameters).
+     *
+     * @return the dataset
+     * @throws IOException if an I/O error occurs
+     */
+    public static StatisticalReferenceDataset createKirby2()
+        throws IOException {
+        final BufferedReader in = createBufferedReaderFromResource("Kirby2.dat");
+        StatisticalReferenceDataset dataset = null;
+        try {
+            dataset = new StatisticalReferenceDataset(in) {
+
+                @Override
+                public double getModelValue(final double x, final double[] a) {
+                    final double p = a[0] + x * (a[1] + x * a[2]);
+                    final double q = 1.0 + x * (a[3] + x * a[4]);
+                    return p / q;
+                }
+
+                @Override
+                public double[] getModelDerivatives(final double x,
+                                                    final double[] a) {
+                    final double[] dy = new double[5];
+                    final double p = a[0] + x * (a[1] + x * a[2]);
+                    final double q = 1.0 + x * (a[3] + x * a[4]);
+                    dy[0] = 1.0 / q;
+                    dy[1] = x / q;
+                    dy[2] = x * dy[1];
+                    dy[3] = -x * p / (q * q);
+                    dy[4] = x * dy[3];
+                    return dy;
+                }
+            };
+        } finally {
+            in.close();
+        }
+        return dataset;
+    }
+
+    /**
+     * Creates the NIST "Hahn1" dataset (rational model, seven parameters).
+     *
+     * @return the dataset
+     * @throws IOException if an I/O error occurs
+     */
+    public static StatisticalReferenceDataset createHahn1()
+        throws IOException {
+        final BufferedReader in = createBufferedReaderFromResource("Hahn1.dat");
+        StatisticalReferenceDataset dataset = null;
+        try {
+            dataset = new StatisticalReferenceDataset(in) {
+
+                @Override
+                public double getModelValue(final double x, final double[] a) {
+                    final double p = a[0] + x * (a[1] + x * (a[2] + x * a[3]));
+                    final double q = 1.0 + x * (a[4] + x * (a[5] + x * a[6]));
+                    return p / q;
+                }
+
+                @Override
+                public double[] getModelDerivatives(final double x,
+                                                    final double[] a) {
+                    final double[] dy = new double[7];
+                    final double p = a[0] + x * (a[1] + x * (a[2] + x * a[3]));
+                    final double q = 1.0 + x * (a[4] + x * (a[5] + x * a[6]));
+                    dy[0] = 1.0 / q;
+                    dy[1] = x * dy[0];
+                    dy[2] = x * dy[1];
+                    dy[3] = x * dy[2];
+                    dy[4] = -x * p / (q * q);
+                    dy[5] = x * dy[4];
+                    dy[6] = x * dy[5];
+                    return dy;
+                }
+            };
+        } finally {
+            in.close();
+        }
+        return dataset;
+    }
+
+    /**
+     * Creates the NIST "MGH17" dataset (sum of exponentials, five parameters).
+     *
+     * @return the dataset
+     * @throws IOException if an I/O error occurs
+     */
+    public static StatisticalReferenceDataset createMGH17()
+        throws IOException {
+        final BufferedReader in = createBufferedReaderFromResource("MGH17.dat");
+        StatisticalReferenceDataset dataset = null;
+        try {
+            dataset = new StatisticalReferenceDataset(in) {
+
+                @Override
+                public double getModelValue(final double x, final double[] a) {
+                    return a[0] + a[1] * FastMath.exp(-a[3] * x) + a[2] *
+                           FastMath.exp(-a[4] * x);
+                }
+
+                @Override
+                public double[] getModelDerivatives(final double x,
+                                                    final double[] a) {
+                    final double[] dy = new double[5];
+                    dy[0] = 1.0;
+                    dy[1] = FastMath.exp(-x * a[3]);
+                    dy[2] = FastMath.exp(-x * a[4]);
+                    dy[3] = -x * a[1] * dy[1];
+                    dy[4] = -x * a[2] * dy[2];
+                    return dy;
+                }
+            };
+        } finally {
+            in.close();
+        }
+        return dataset;
+    }
+
+    /**
+     * Creates the NIST "Lanczos1" dataset (sum of exponentials, six
+     * parameters).
+     *
+     * @return the dataset
+     * @throws IOException if an I/O error occurs
+     */
+    public static StatisticalReferenceDataset createLanczos1()
+        throws IOException {
+        final BufferedReader in =
+            createBufferedReaderFromResource("Lanczos1.dat");
+        StatisticalReferenceDataset dataset = null;
+        try {
+            dataset = new StatisticalReferenceDataset(in) {
+
+                @Override
+                public double getModelValue(final double x, final double[] a) {
+                    // Debug println removed: it was executed on every model
+                    // evaluation, flooding the test output.
+                    return a[0] * FastMath.exp(-a[3] * x) +
+                           a[1] * FastMath.exp(-a[4] * x) +
+                           a[2] * FastMath.exp(-a[5] * x);
+                }
+
+                @Override
+                public double[] getModelDerivatives(final double x,
+                    final double[] a) {
+                    final double[] dy = new double[6];
+                    dy[0] = FastMath.exp(-x * a[3]);
+                    dy[1] = FastMath.exp(-x * a[4]);
+                    dy[2] = FastMath.exp(-x * a[5]);
+                    dy[3] = -x * a[0] * dy[0];
+                    dy[4] = -x * a[1] * dy[1];
+                    dy[5] = -x * a[2] * dy[2];
+                    return dy;
+                }
+            };
+        } finally {
+            in.close();
+        }
+        return dataset;
+    }
+
+    /**
+     * Returns an array with all available reference datasets.
+     * Made {@code static}: the constructor is private, so an instance method
+     * here was uncallable.
+     *
+     * @return the array of datasets
+     * @throws IOException if an I/O error occurs
+     */
+    public static StatisticalReferenceDataset[] createAll()
+        throws IOException {
+        // NOTE(review): "Hahn1" and "Lanczos1" are available but omitted
+        // here — confirm whether the exclusion is intentional.
+        return new StatisticalReferenceDataset[] {
+            createKirby2(), createMGH17()
+        };
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StatisticalReferenceDatasetFactory.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StraightLineProblem.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StraightLineProblem.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StraightLineProblem.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StraightLineProblem.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.optim.nonlinear.vector.jacobian;
+
+import java.util.ArrayList;
+import org.apache.commons.math3.analysis.MultivariateVectorFunction;
+import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.stat.regression.SimpleRegression;
+import org.apache.commons.math3.optim.nonlinear.vector.ModelFunction;
+import org.apache.commons.math3.optim.nonlinear.vector.ModelFunctionJacobian;
+
+/**
+ * Class that models a straight line defined as {@code y = a x + b}.
+ * The parameters of problem are:
+ * <ul>
+ *  <li>{@code a}</li>
+ *  <li>{@code b}</li>
+ * </ul>
+ * The model functions are:
+ * <ul>
+ *  <li>for each pair (a, b), the y-coordinate of the line.</li>
+ * </ul>
+ */
+class StraightLineProblem {
+    /** Cloud of points assumed to be fitted by a straight line. */
+    private final ArrayList<double[]> points;
+    /** Error (on the y-coordinate of the points). */
+    private final double sigma;
+
+    /**
+     * @param error Assumed error for the y-coordinate.
+     */
+    public StraightLineProblem(double error) {
+        points = new ArrayList<double[]>();
+        sigma = error;
+    }
+
+    public void addPoint(double px, double py) {
+        points.add(new double[] { px, py });
+    }
+
+    /**
+     * @return the list of x-coordinates.
+     */
+    public double[] x() {
+        final double[] v = new double[points.size()];
+        for (int i = 0; i < points.size(); i++) {
+            final double[] p = points.get(i);
+            v[i] = p[0]; // x-coordinate.
+        }
+
+        return v;
+    }
+
+    /**
+     * @return the list of y-coordinates.
+     */
+    public double[] y() {
+        final double[] v = new double[points.size()];
+        for (int i = 0; i < points.size(); i++) {
+            final double[] p = points.get(i);
+            v[i] = p[1]; // y-coordinate.
+        }
+
+        return v;
+    }
+
+    public double[] target() {
+        return y();
+    }
+
+    public double[] weight() {
+        final double weight = 1 / (sigma * sigma);
+        final double[] w = new double[points.size()];
+        for (int i = 0; i < points.size(); i++) {
+            w[i] = weight;
+        }
+
+        return w;
+    }
+
+    public ModelFunction getModelFunction() {
+        return new ModelFunction(new MultivariateVectorFunction() {
+                public double[] value(double[] params) {
+                    final Model line = new Model(params[0], params[1]);
+
+                    final double[] model = new double[points.size()];
+                    for (int i = 0; i < points.size(); i++) {
+                        final double[] p = points.get(i);
+                        model[i] = line.value(p[0]);
+                    }
+
+                    return model;
+                }
+            });
+    }
+
+    public ModelFunctionJacobian getModelFunctionJacobian() {
+        return new ModelFunctionJacobian(new MultivariateMatrixFunction() {
+                public double[][] value(double[] point) {
+                    return jacobian(point);
+                }
+            });
+    }
+
+    /**
+     * Directly solve the linear problem, using the {@link SimpleRegression}
+     * class.
+     */
+    public double[] solve() {
+        final SimpleRegression regress = new SimpleRegression(true);
+        for (double[] d : points) {
+            regress.addData(d[0], d[1]);
+        }
+
+        final double[] result = { regress.getSlope(), regress.getIntercept() };
+        return result;
+    }
+
+    private double[][] jacobian(double[] params) {
+        final double[][] jacobian = new double[points.size()][2];
+
+        for (int i = 0; i < points.size(); i++) {
+            final double[] p = points.get(i);
+            // Partial derivative wrt "a".
+            jacobian[i][0] = p[0];
+            // Partial derivative wrt "b".
+            jacobian[i][1] = 1;
+        }
+
+        return jacobian;
+    }
+
+    /**
+     * Linear function.
+     */
+    public static class Model implements UnivariateFunction {
+        final double a;
+        final double b;
+
+        public Model(double a,
+                     double b) {
+            this.a = a;
+            this.b = b;
+        }
+
+        public double value(double x) {
+            return a * x + b;
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/nonlinear/vector/jacobian/StraightLineProblem.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BracketFinderTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BracketFinderTest.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BracketFinderTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BracketFinderTest.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.univariate;
+
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.optim.GoalType;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test for {@link BracketFinder}.
+ */
+public class BracketFinderTest {
+
+    @Test
+    public void testCubicMin() {
+        final BracketFinder finder = new BracketFinder();
+        // Cubic with a local minimum near x = 0.618; clamped on the left of -2.
+        final UnivariateFunction func = new UnivariateFunction() {
+                public double value(double x) {
+                    return x < -2 ? value(-2) : (x - 1) * (x + 2) * (x + 3);
+                }
+            };
+
+        finder.search(func, GoalType.MINIMIZE, -2, -1);
+        final double tol = 1e-15;
+        // Comparing with results computed in Python.
+        Assert.assertEquals(-2, finder.getLo(), tol);
+        Assert.assertEquals(-1, finder.getMid(), tol);
+        Assert.assertEquals(0.61803399999999997, finder.getHi(), tol);
+    }
+
+    @Test
+    public void testCubicMax() {
+        final BracketFinder finder = new BracketFinder();
+        // Same cubic as in "testCubicMin", but negated: its maximum lies
+        // where the other function's minimum is.
+        final UnivariateFunction func = new UnivariateFunction() {
+                public double value(double x) {
+                    return x < -2 ? value(-2) : -(x - 1) * (x + 2) * (x + 3);
+                }
+            };
+
+        finder.search(func, GoalType.MAXIMIZE, -2, -1);
+        final double tol = 1e-15;
+        Assert.assertEquals(-2, finder.getLo(), tol);
+        Assert.assertEquals(-1, finder.getMid(), tol);
+        Assert.assertEquals(0.61803399999999997, finder.getHi(), tol);
+    }
+
+    @Test
+    public void testMinimumIsOnIntervalBoundary() {
+        final UnivariateFunction parabola = new UnivariateFunction() {
+                public double value(double x) {
+                    return x * x;
+                }
+            };
+
+        final BracketFinder finder = new BracketFinder();
+
+        // The minimum (x = 0) lies on an endpoint of each initial interval:
+        // the returned bracket must nevertheless contain it.
+        finder.search(parabola, GoalType.MINIMIZE, 0, 1);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+
+        finder.search(parabola, GoalType.MINIMIZE, -1, 0);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+    }
+
+    @Test
+    public void testIntervalBoundsOrdering() {
+        final UnivariateFunction parabola = new UnivariateFunction() {
+                public double value(double x) {
+                    return x * x;
+                }
+            };
+
+        final BracketFinder finder = new BracketFinder();
+
+        // The bracket must contain the minimum (x = 0) regardless of the
+        // ordering of the initial points, even when both initial points lie
+        // on the same side of the minimum.
+        finder.search(parabola, GoalType.MINIMIZE, -1, 1);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+
+        finder.search(parabola, GoalType.MINIMIZE, 1, -1);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+
+        finder.search(parabola, GoalType.MINIMIZE, 1, 2);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+
+        finder.search(parabola, GoalType.MINIMIZE, 2, 1);
+        Assert.assertTrue(finder.getLo() <= 0);
+        Assert.assertTrue(0 <= finder.getHi());
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BracketFinderTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BrentOptimizerTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BrentOptimizerTest.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BrentOptimizerTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BrentOptimizerTest.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,301 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.univariate;
+
+
+import org.apache.commons.math3.analysis.QuinticFunction;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.analysis.function.Sin;
+import org.apache.commons.math3.analysis.function.StepFunction;
+import org.apache.commons.math3.analysis.FunctionUtils;
+import org.apache.commons.math3.exception.NumberIsTooLargeException;
+import org.apache.commons.math3.exception.NumberIsTooSmallException;
+import org.apache.commons.math3.exception.TooManyEvaluationsException;
+import org.apache.commons.math3.optim.ConvergenceChecker;
+import org.apache.commons.math3.optim.GoalType;
+import org.apache.commons.math3.optim.MaxEval;
+import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.commons.math3.util.FastMath;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * @version $Id$
+ */
+public final class BrentOptimizerTest {
+
+    @Test
+    public void testSinMin() {
+        UnivariateFunction f = new Sin();
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-10, 1e-14);
+        Assert.assertEquals(3 * Math.PI / 2, optimizer.optimize(new MaxEval(200),
+                                                                new UnivariateObjectiveFunction(f),
+                                                                GoalType.MINIMIZE,
+                                                                new SearchInterval(4, 5)).getPoint(), 1e-8);
+        Assert.assertTrue(optimizer.getEvaluations() <= 50);
+        Assert.assertEquals(200, optimizer.getMaxEvaluations());
+        Assert.assertEquals(3 * Math.PI / 2, optimizer.optimize(new MaxEval(200),
+                                                                new UnivariateObjectiveFunction(f),
+                                                                GoalType.MINIMIZE,
+                                                                new SearchInterval(1, 5)).getPoint(), 1e-8);
+        Assert.assertTrue(optimizer.getEvaluations() <= 100);
+        Assert.assertTrue(optimizer.getEvaluations() >= 15);
+        try {
+            optimizer.optimize(new MaxEval(10),
+                               new UnivariateObjectiveFunction(f),
+                               GoalType.MINIMIZE,
+                               new SearchInterval(4, 5));
+            Assert.fail("an exception should have been thrown");
+        } catch (TooManyEvaluationsException fee) {
+            // expected
+        }
+    }
+
+    @Test
+    public void testSinMinWithValueChecker() {
+        final UnivariateFunction f = new Sin();
+        final ConvergenceChecker<UnivariatePointValuePair> checker = new SimpleUnivariateValueChecker(1e-5, 1e-14);
+        // The default stopping criterion of Brent's algorithm should not
+        // pass, but the search will stop at the given relative tolerance
+        // for the function value.
+        final UnivariateOptimizer optimizer = new BrentOptimizer(1e-10, 1e-14, checker);
+        final UnivariatePointValuePair result = optimizer.optimize(new MaxEval(200),
+                                                                   new UnivariateObjectiveFunction(f),
+                                                                   GoalType.MINIMIZE,
+                                                                   new SearchInterval(4, 5));
+        Assert.assertEquals(3 * Math.PI / 2, result.getPoint(), 1e-3);
+    }
+
+    @Test
+    public void testBoundaries() {
+        final double lower = -1.0;
+        final double upper = +1.0;
+        UnivariateFunction f = new UnivariateFunction() {            
+            public double value(double x) {
+                if (x < lower) {
+                    throw new NumberIsTooSmallException(x, lower, true);
+                } else if (x > upper) {
+                    throw new NumberIsTooLargeException(x, upper, true);
+                } else {
+                    return x;
+                }
+            }
+        };
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-10, 1e-14);
+        Assert.assertEquals(lower,
+                            optimizer.optimize(new MaxEval(100),
+                                               new UnivariateObjectiveFunction(f),
+                                               GoalType.MINIMIZE,
+                                               new SearchInterval(lower, upper)).getPoint(),
+                            1.0e-8);
+        Assert.assertEquals(upper,
+                            optimizer.optimize(new MaxEval(100),
+                                               new UnivariateObjectiveFunction(f),
+                                               GoalType.MAXIMIZE,
+                                               new SearchInterval(lower, upper)).getPoint(),
+                            1.0e-8);
+    }
+
+    @Test
+    public void testQuinticMin() {
+        // The function has local minima at -0.27195613 and 0.82221643.
+        UnivariateFunction f = new QuinticFunction();
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-10, 1e-14);
+        Assert.assertEquals(-0.27195613, optimizer.optimize(new MaxEval(200),
+                                                            new UnivariateObjectiveFunction(f),
+                                                            GoalType.MINIMIZE,
+                                                            new SearchInterval(-0.3, -0.2)).getPoint(), 1.0e-8);
+        Assert.assertEquals( 0.82221643, optimizer.optimize(new MaxEval(200),
+                                                            new UnivariateObjectiveFunction(f),
+                                                            GoalType.MINIMIZE,
+                                                            new SearchInterval(0.3,  0.9)).getPoint(), 1.0e-8);
+        Assert.assertTrue(optimizer.getEvaluations() <= 50);
+
+        // search in a large interval
+        Assert.assertEquals(-0.27195613, optimizer.optimize(new MaxEval(200),
+                                                            new UnivariateObjectiveFunction(f),
+                                                            GoalType.MINIMIZE,
+                                                            new SearchInterval(-1.0, 0.2)).getPoint(), 1.0e-8);
+        Assert.assertTrue(optimizer.getEvaluations() <= 50);
+    }
+
+    @Test
+    public void testQuinticMinStatistics() {
+        // The function has local minima at -0.27195613 and 0.82221643.
+        UnivariateFunction f = new QuinticFunction();
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-11, 1e-14);
+
+        final DescriptiveStatistics[] stat = new DescriptiveStatistics[2];
+        for (int i = 0; i < stat.length; i++) {
+            stat[i] = new DescriptiveStatistics();
+        }
+
+        final double min = -0.75;
+        final double max = 0.25;
+        final int nSamples = 200;
+        final double delta = (max - min) / nSamples;
+        for (int i = 0; i < nSamples; i++) {
+            final double start = min + i * delta;
+            stat[0].addValue(optimizer.optimize(new MaxEval(40),
+                                                new UnivariateObjectiveFunction(f),
+                                                GoalType.MINIMIZE,
+                                                new SearchInterval(min, max, start)).getPoint());
+            stat[1].addValue(optimizer.getEvaluations());
+        }
+
+        final double meanOptValue = stat[0].getMean();
+        final double medianEval = stat[1].getPercentile(50);
+        Assert.assertTrue(meanOptValue > -0.2719561281);
+        Assert.assertTrue(meanOptValue < -0.2719561280);
+        Assert.assertEquals(23, (int) medianEval);
+    }
+
+    @Test
+    public void testQuinticMax() {
+        // The quintic function has zeros at 0, +-0.5 and +-1.
+        // The function has a local maximum at 0.27195613.
+        UnivariateFunction f = new QuinticFunction();
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-12, 1e-14);
+        Assert.assertEquals(0.27195613, optimizer.optimize(new MaxEval(100),
+                                                           new UnivariateObjectiveFunction(f),
+                                                           GoalType.MAXIMIZE,
+                                                           new SearchInterval(0.2, 0.3)).getPoint(), 1e-8);
+        try {
+            optimizer.optimize(new MaxEval(5),
+                               new UnivariateObjectiveFunction(f),
+                               GoalType.MAXIMIZE,
+                               new SearchInterval(0.2, 0.3));
+            Assert.fail("an exception should have been thrown");
+        } catch (TooManyEvaluationsException miee) {
+            // expected
+        }
+    }
+
+    @Test
+    public void testMinEndpoints() {
+        UnivariateFunction f = new Sin();
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-8, 1e-14);
+
+        // endpoint is minimum
+        double result = optimizer.optimize(new MaxEval(50),
+                                           new UnivariateObjectiveFunction(f),
+                                           GoalType.MINIMIZE,
+                                           new SearchInterval(3 * Math.PI / 2, 5)).getPoint();
+        Assert.assertEquals(3 * Math.PI / 2, result, 1e-6);
+
+        result = optimizer.optimize(new MaxEval(50),
+                                    new UnivariateObjectiveFunction(f),
+                                    GoalType.MINIMIZE,
+                                    new SearchInterval(4, 3 * Math.PI / 2)).getPoint();
+        Assert.assertEquals(3 * Math.PI / 2, result, 1e-6);
+    }
+
+    @Test
+    public void testMath832() {
+        final UnivariateFunction f = new UnivariateFunction() {
+                public double value(double x) {
+                    final double sqrtX = FastMath.sqrt(x);
+                    final double a = 1e2 * sqrtX;
+                    final double b = 1e6 / x;
+                    final double c = 1e4 / sqrtX;
+
+                    return a + b + c;
+                }
+            };
+
+        UnivariateOptimizer optimizer = new BrentOptimizer(1e-10, 1e-8);
+        final double result = optimizer.optimize(new MaxEval(1483),
+                                                 new UnivariateObjectiveFunction(f),
+                                                 GoalType.MINIMIZE,
+                                                 new SearchInterval(Double.MIN_VALUE,
+                                                                    Double.MAX_VALUE)).getPoint();
+
+        Assert.assertEquals(804.9355825, result, 1e-6);
+    }
+
+    /**
+     * Contrived example showing that prior to the resolution of MATH-855
+     * (second revision), the algorithm would not return the best point if
+     * it happened to be the initial guess.
+     */
+    @Test
+    public void testKeepInitIfBest() {
+        final double minSin = 3 * Math.PI / 2;
+        final double offset = 1e-8;
+        final double delta = 1e-7;
+        final UnivariateFunction f1 = new Sin();
+        final UnivariateFunction f2 = new StepFunction(new double[] { minSin, minSin + offset, minSin + 2 * offset},
+                                                       new double[] { 0, -1, 0 });
+        final UnivariateFunction f = FunctionUtils.add(f1, f2);
+        // A slightly less stringent tolerance would make the test pass
+        // even with the previous implementation.
+        final double relTol = 1e-8;
+        final UnivariateOptimizer optimizer = new BrentOptimizer(relTol, 1e-100);
+        final double init = minSin + 1.5 * offset;
+        final UnivariatePointValuePair result
+            = optimizer.optimize(new MaxEval(200),
+                                 new UnivariateObjectiveFunction(f),
+                                 GoalType.MINIMIZE,
+                                 new SearchInterval(minSin - 6.789 * delta,
+                                                    minSin + 9.876 * delta,
+                                                    init));
+        final int numEval = optimizer.getEvaluations();
+
+        final double sol = result.getPoint();
+        final double expected = init;
+
+//         System.out.println("numEval=" + numEval);
+//         System.out.println("min=" + init + " f=" + f.value(init));
+//         System.out.println("sol=" + sol + " f=" + f.value(sol));
+//         System.out.println("exp=" + expected + " f=" + f.value(expected));
+
+        Assert.assertTrue("Best point not reported", f.value(sol) <= f.value(expected));
+    }
+
+    /**
+     * Contrived example showing that prior to the resolution of MATH-855,
+     * the algorithm, by always returning the last evaluated point, would
+     * sometimes not report the best point it had found.
+     */
+    @Test
+    public void testMath855() {
+        final double minSin = 3 * Math.PI / 2;
+        final double offset = 1e-8;
+        final double delta = 1e-7;
+        final UnivariateFunction f1 = new Sin();
+        final UnivariateFunction f2 = new StepFunction(new double[] { minSin, minSin + offset, minSin + 5 * offset },
+                                                       new double[] { 0, -1, 0 });
+        final UnivariateFunction f = FunctionUtils.add(f1, f2);
+        final UnivariateOptimizer optimizer = new BrentOptimizer(1e-8, 1e-100);
+        final UnivariatePointValuePair result
+            = optimizer.optimize(new MaxEval(200),
+                                 new UnivariateObjectiveFunction(f),
+                                 GoalType.MINIMIZE,
+                                 new SearchInterval(minSin - 6.789 * delta,
+                                                    minSin + 9.876 * delta));
+        final int numEval = optimizer.getEvaluations();
+
+        final double sol = result.getPoint();
+        final double expected = 4.712389027602411;
+
+        // System.out.println("min=" + (minSin + offset) + " f=" + f.value(minSin + offset));
+        // System.out.println("sol=" + sol + " f=" + f.value(sol));
+        // System.out.println("exp=" + expected + " f=" + f.value(expected));
+
+        Assert.assertTrue("Best point not reported", f.value(sol) <= f.value(expected));
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/BrentOptimizerTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/MultiStartUnivariateOptimizerTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/MultiStartUnivariateOptimizerTest.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/MultiStartUnivariateOptimizerTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/MultiStartUnivariateOptimizerTest.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.univariate;
+
+import org.apache.commons.math3.analysis.QuinticFunction;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.analysis.function.Sin;
+import org.apache.commons.math3.optim.GoalType;
+import org.apache.commons.math3.optim.MaxEval;
+import org.apache.commons.math3.random.JDKRandomGenerator;
+import org.apache.commons.math3.util.FastMath;
+import org.apache.commons.math3.exception.MathIllegalStateException;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class MultiStartUnivariateOptimizerTest {
+    /** A missing "MaxEval" setting must be detected and reported. */
+    @Test(expected = MathIllegalStateException.class)
+    public void testMissingMaxEval() {
+        final UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
+        final JDKRandomGenerator g = new JDKRandomGenerator();
+        g.setSeed(44428400075L); // Uppercase "L": a lowercase suffix is easily misread as "1".
+        final MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 10, g);
+        optimizer.optimize(new UnivariateObjectiveFunction(new Sin()),
+                           GoalType.MINIMIZE,
+                           new SearchInterval(-1, 1));
+    }
+
+    /** A missing "SearchInterval" setting must be detected and reported. */
+    @Test(expected = MathIllegalStateException.class)
+    public void testMissingSearchInterval() {
+        final UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
+        final JDKRandomGenerator g = new JDKRandomGenerator();
+        g.setSeed(44428400075L);
+        final MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 10, g);
+        optimizer.optimize(new MaxEval(300),
+                           new UnivariateObjectiveFunction(new Sin()),
+                           GoalType.MINIMIZE);
+    }
+
+    /** Every restart of a multistart minimization of sin(x) should land on a minimum. */
+    @Test
+    public void testSinMin() {
+        final UnivariateFunction f = new Sin();
+        final UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
+        final JDKRandomGenerator g = new JDKRandomGenerator();
+        g.setSeed(44428400075L);
+        final MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 10, g);
+        optimizer.optimize(new MaxEval(300),
+                           new UnivariateObjectiveFunction(f),
+                           GoalType.MINIMIZE,
+                           new SearchInterval(-100.0, 100.0));
+        final UnivariatePointValuePair[] optima = optimizer.getOptima();
+        for (int i = 1; i < optima.length; ++i) {
+            // Consecutive optima must be separated by a whole number of periods.
+            final double d = (optima[i].getPoint() - optima[i - 1].getPoint()) / (2 * FastMath.PI);
+            Assert.assertTrue(FastMath.abs(d - FastMath.rint(d)) < 1.0e-8);
+            // Each optimum must reach the minimum value of sin, and be self-consistent.
+            Assert.assertEquals(-1.0, f.value(optima[i].getPoint()), 1.0e-10);
+            Assert.assertEquals(f.value(optima[i].getPoint()), optima[i].getValue(), 1.0e-10);
+        }
+        Assert.assertTrue(optimizer.getEvaluations() > 200);
+        Assert.assertTrue(optimizer.getEvaluations() < 300);
+    }
+
+    /** Multistart minimization of the quintic function on [-0.3, -0.2]. */
+    @Test
+    public void testQuinticMin() {
+        // The quintic function has zeros at 0, +-0.5 and +-1.
+        // The function has extrema (first derivative is zero) at 0.27195613 and 0.82221643,
+        final UnivariateFunction f = new QuinticFunction();
+        final UnivariateOptimizer underlying = new BrentOptimizer(1e-9, 1e-14);
+        final JDKRandomGenerator g = new JDKRandomGenerator();
+        g.setSeed(4312000053L);
+        final MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 5, g);
+
+        final UnivariatePointValuePair optimum
+            = optimizer.optimize(new MaxEval(300),
+                                 new UnivariateObjectiveFunction(f),
+                                 GoalType.MINIMIZE,
+                                 new SearchInterval(-0.3, -0.2));
+        Assert.assertEquals(-0.27195613, optimum.getPoint(), 1e-9);
+        Assert.assertEquals(-0.0443342695, optimum.getValue(), 1e-9);
+
+        final UnivariatePointValuePair[] optima = optimizer.getOptima();
+        for (int i = 0; i < optima.length; ++i) {
+            // Each reported optimum must be self-consistent.
+            Assert.assertEquals(f.value(optima[i].getPoint()), optima[i].getValue(), 1e-9);
+        }
+        Assert.assertTrue(optimizer.getEvaluations() >= 50);
+        Assert.assertTrue(optimizer.getEvaluations() <= 100);
+    }
+
+    /** An exception thrown by the objective function must propagate to the caller. */
+    @Test
+    public void testBadFunction() {
+        final UnivariateFunction f = new UnivariateFunction() {
+                public double value(double x) {
+                    if (x < 0) {
+                        throw new LocalException();
+                    }
+                    return 0;
+                }
+            };
+        final UnivariateOptimizer underlying = new BrentOptimizer(1e-9, 1e-14);
+        final JDKRandomGenerator g = new JDKRandomGenerator();
+        g.setSeed(4312000053L);
+        final MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 5, g);
+
+        try {
+            optimizer.optimize(new MaxEval(300),
+                               new UnivariateObjectiveFunction(f),
+                               GoalType.MINIMIZE,
+                               new SearchInterval(-0.3, -0.2));
+            Assert.fail();
+        } catch (LocalException e) {
+            // Expected.
+        }
+
+        // Ensure that the exception was thrown because no optimum was found.
+        Assert.assertNull(optimizer.getOptima()[0]);
+    }
+
+    /** Marker exception used by {@link #testBadFunction()}. */
+    private static class LocalException extends RuntimeException {
+        private static final long serialVersionUID = 1194682757034350629L;
+    }
+
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/MultiStartUnivariateOptimizerTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/SimpleUnivariateValueCheckerTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/SimpleUnivariateValueCheckerTest.java?rev=1420684&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/SimpleUnivariateValueCheckerTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/SimpleUnivariateValueCheckerTest.java Wed Dec 12 14:10:38 2012
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.univariate;
+
+import org.apache.commons.math3.exception.NotStrictlyPositiveException;
+import org.junit.Test;
+import org.junit.Assert;
+
+public class SimpleUnivariateValueCheckerTest {
+    @Test(expected = NotStrictlyPositiveException.class)
+    public void testIterationCheckPrecondition() {
+        // A non-positive iteration threshold must be rejected.
+        new SimpleUnivariateValueChecker(1e-1, 1e-2, 0);
+    }
+
+    @Test
+    public void testIterationCheck() {
+        final int max = 10;
+        final SimpleUnivariateValueChecker checker = new SimpleUnivariateValueChecker(1e-1, 1e-2, max);
+        // Once the iteration budget is reached, convergence is reported
+        // unconditionally.
+        Assert.assertTrue(checker.converged(max, null, null));
+        Assert.assertTrue(checker.converged(max + 1, null, null));
+    }
+
+    @Test
+    public void testIterationCheckDisabled() {
+        final SimpleUnivariateValueChecker checker = new SimpleUnivariateValueChecker(1e-8, 1e-8);
+
+        final UnivariatePointValuePair a = new UnivariatePointValuePair(1d, 1d);
+        final UnivariatePointValuePair b = new UnivariatePointValuePair(10d, 10d);
+
+        // With no iteration threshold, only the function values matter:
+        // distinct values are never converged, whatever the iteration count.
+        Assert.assertFalse(checker.converged(-1, a, b));
+        Assert.assertFalse(checker.converged(0, a, b));
+        Assert.assertFalse(checker.converged(1000000, a, b));
+
+        // Identical values are converged.
+        Assert.assertTrue(checker.converged(-1, a, a));
+        Assert.assertTrue(checker.converged(-1, b, b));
+    }
+
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/optim/univariate/SimpleUnivariateValueCheckerTest.java
------------------------------------------------------------------------------
    svn:eol-style = native