Posted to commits@commons.apache.org by er...@apache.org on 2014/01/10 23:01:28 UTC

svn commit: r1557267 [2/3] - in /commons/proper/math/trunk: ./ src/changes/ src/main/java/org/apache/commons/math3/ml/neuralnet/ src/main/java/org/apache/commons/math3/ml/neuralnet/oned/ src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/ src/mai...

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;
+
+import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
+import org.apache.commons.math3.ml.neuralnet.sofm.util.QuasiSigmoidDecayFunction;
+import org.apache.commons.math3.exception.OutOfRangeException;
+
+/**
+ * Factory for creating instances of {@link LearningFactorFunction}.
+ *
+ * @version $Id$
+ */
+public class LearningFactorFunctionFactory {
+    /** Class contains only static methods. */
+    private LearningFactorFunctionFactory() {}
+
+    /**
+     * Creates an exponential decay {@link LearningFactorFunction function}.
+     * It will compute <code>a e<sup>-x / b</sup></code>,
+     * where {@code x} is the (integer) independent variable and
+     * <ul>
+     *  <li><code>a = initValue</code>
+     *  <li><code>b = -numCall / ln(valueAtNumCall / initValue)</code>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e.
+     * {@link LearningFactorFunction#value(long) value(0)}.
+     * @param valueAtNumCall Value of the function at {@code numCall}.
+     * @param numCall Argument for which the function returns
+     * {@code valueAtNumCall}.
+     * @return the learning factor function.
+     * @throws org.apache.commons.math3.exception.OutOfRangeException
+     * if {@code initValue <= 0} or {@code initValue > 1}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code valueAtNumCall <= 0}.
+     * @throws org.apache.commons.math3.exception.NumberIsTooLargeException
+     * if {@code valueAtNumCall >= initValue}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code numCall <= 0}.
+     */
+    public static LearningFactorFunction exponentialDecay(final double initValue,
+                                                          final double valueAtNumCall,
+                                                          final long numCall) {
+        if (initValue <= 0 ||
+            initValue > 1) {
+            throw new OutOfRangeException(initValue, 0, 1);
+        }
+
+        return new LearningFactorFunction() {
+            /** DecayFunction. */
+            private final ExponentialDecayFunction decay
+                = new ExponentialDecayFunction(initValue, valueAtNumCall, numCall);
+
+            /** {@inheritDoc} */
+            @Override
+            public double value(long n) {
+                return decay.value(n);
+            }
+        };
+    }
+
+    /**
+     * Creates a sigmoid-like {@link LearningFactorFunction function}.
+     * The function {@code f} will have the following properties:
+     * <ul>
+     *  <li>{@code f(0) = initValue}</li>
+     *  <li>{@code numCall} is the inflexion point</li>
+     *  <li>{@code slope = f'(numCall)}</li>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e.
+     * {@link LearningFactorFunction#value(long) value(0)}.
+     * @param slope Value of the function derivative at {@code numCall}.
+     * @param numCall Inflexion point.
+     * @return the learning factor function.
+     * @throws org.apache.commons.math3.exception.OutOfRangeException
+     * if {@code initValue <= 0} or {@code initValue > 1}.
+     * @throws org.apache.commons.math3.exception.NumberIsTooLargeException
+     * if {@code slope >= 0}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code numCall <= 0}.
+     */
+    public static LearningFactorFunction quasiSigmoidDecay(final double initValue,
+                                                           final double slope,
+                                                           final long numCall) {
+        if (initValue <= 0 ||
+            initValue > 1) {
+            throw new OutOfRangeException(initValue, 0, 1);
+        }
+
+        return new LearningFactorFunction() {
+            /** DecayFunction. */
+            private final QuasiSigmoidDecayFunction decay
+                = new QuasiSigmoidDecayFunction(initValue, slope, numCall);
+
+            /** {@inheritDoc} */
+            @Override
+            public double value(long n) {
+                return decay.value(n);
+            }
+        };
+    }
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/LearningFactorFunctionFactory.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
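
A minimal usage sketch of the factory added above (the numeric parameters are
illustrative only and not taken from this commit; imports from
org.apache.commons.math3.ml.neuralnet.sofm are assumed):

    // Learning rate that starts at 0.8, reaches 0.05 at the 1000th call and
    // keeps decaying exponentially afterwards.
    LearningFactorFunction lf
        = LearningFactorFunctionFactory.exponentialDecay(0.8, 0.05, 1000);
    double initial = lf.value(0);    // 0.8
    double later   = lf.value(1000); // 0.05 (up to floating-point rounding)

    // Quasi-sigmoid variant: 0.8 at call 0, inflexion point at call 500,
    // derivative -1e-3 at the inflexion point.
    LearningFactorFunction lfSigmoid
        = LearningFactorFunctionFactory.quasiSigmoidDecay(0.8, -1e-3, 500);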

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;
+
+/**
+ * Provides the network neighbourhood's size as a function of the
+ * number of calls already performed during the learning task.
+ * The "neighbourhood" is the set of neurons that can be reached
+ * by traversing at most the number of links returned by this
+ * function.
+ *
+ * @version $Id$
+ */
+public interface NeighbourhoodSizeFunction {
+    /**
+     * Computes the neighbourhood size at the current call.
+     *
+     * @param numCall Current step of the training task.
+     * @return the value of the function at {@code numCall}.
+     */
+    int value(long numCall);
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunction.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
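
Besides the factory-created decays (see NeighbourhoodSizeFunctionFactory
below), the interface can be implemented directly; a purely hypothetical
linear shrink, for illustration only:

    // Hypothetical example (not part of this commit): neighbourhood radius
    // that shrinks from 5 links to 0 over the first 100 calls.
    NeighbourhoodSizeFunction linearShrink = new NeighbourhoodSizeFunction() {
        @Override
        public int value(long numCall) {
            final long size = 5 - numCall / 20;
            return size > 0 ? (int) size : 0;
        }
    };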

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;
+
+import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
+import org.apache.commons.math3.ml.neuralnet.sofm.util.QuasiSigmoidDecayFunction;
+import org.apache.commons.math3.util.FastMath;
+
+/**
+ * Factory for creating instances of {@link NeighbourhoodSizeFunction}.
+ *
+ * @version $Id$
+ */
+public class NeighbourhoodSizeFunctionFactory {
+    /** Class contains only static methods. */
+    private NeighbourhoodSizeFunctionFactory() {}
+
+    /**
+     * Creates an exponential decay {@link NeighbourhoodSizeFunction function}.
+     * It will compute <code>a e<sup>-x / b</sup></code>,
+     * where {@code x} is the (integer) independent variable and
+     * <ul>
+     *  <li><code>a = initValue</code>
+     *  <li><code>b = -numCall / ln(valueAtNumCall / initValue)</code>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e.
+     * {@link NeighbourhoodSizeFunction#value(long) value(0)}.
+     * @param valueAtNumCall Value of the function at {@code numCall}.
+     * @param numCall Argument for which the function returns
+     * {@code valueAtNumCall}.
+     * @return the neighbourhood size function.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code initValue <= 0}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code valueAtNumCall <= 0}.
+     * @throws org.apache.commons.math3.exception.NumberIsTooLargeException
+     * if {@code valueAtNumCall >= initValue}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code numCall <= 0}.
+     */
+    public static NeighbourhoodSizeFunction exponentialDecay(final double initValue,
+                                                             final double valueAtNumCall,
+                                                             final long numCall) {
+        return new NeighbourhoodSizeFunction() {
+            /** DecayFunction. */
+            private final ExponentialDecayFunction decay
+                = new ExponentialDecayFunction(initValue, valueAtNumCall, numCall);
+
+            /** {@inheritDoc} */
+            @Override
+            public int value(long n) {
+                return (int) FastMath.rint(decay.value(n));
+            }
+        };
+    }
+
+    /**
+     * Creates a sigmoid-like {@link NeighbourhoodSizeFunction function}.
+     * The function {@code f} will have the following properties:
+     * <ul>
+     *  <li>{@code f(0) = initValue}</li>
+     *  <li>{@code numCall} is the inflexion point</li>
+     *  <li>{@code slope = f'(numCall)}</li>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e.
+     * {@link NeighbourhoodSizeFunction#value(long) value(0)}.
+     * @param slope Value of the function derivative at {@code numCall}.
+     * @param numCall Inflexion point.
+     * @return the neighbourhood size function.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code initValue <= 0}.
+     * @throws org.apache.commons.math3.exception.NumberIsTooLargeException
+     * if {@code slope >= 0}.
+     * @throws org.apache.commons.math3.exception.NotStrictlyPositiveException
+     * if {@code numCall <= 0}.
+     */
+    public static NeighbourhoodSizeFunction quasiSigmoidDecay(final double initValue,
+                                                              final double slope,
+                                                              final long numCall) {
+        return new NeighbourhoodSizeFunction() {
+            /** DecayFunction. */
+            private final QuasiSigmoidDecayFunction decay
+                = new QuasiSigmoidDecayFunction(initValue, slope, numCall);
+
+            /** {@inheritDoc} */
+            @Override
+            public int value(long n) {
+                return (int) FastMath.rint(decay.value(n));
+            }
+        };
+    }
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/NeighbourhoodSizeFunctionFactory.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
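
As with the learning factor factory, a short usage sketch (illustrative
values; the factory rounds the underlying decay to the nearest integer):

    // Neighbourhood radius that starts at 10 links and reaches 1 link at the
    // 2000th call, then keeps shrinking.
    NeighbourhoodSizeFunction nf
        = NeighbourhoodSizeFunctionFactory.exponentialDecay(10, 1, 2000);
    int startRadius = nf.value(0);    // 10
    int laterRadius = nf.value(2000); // 1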

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Self Organizing Feature Map.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/package-info.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm.util;
+
+import org.apache.commons.math3.exception.NotStrictlyPositiveException;
+import org.apache.commons.math3.exception.NumberIsTooLargeException;
+import org.apache.commons.math3.util.FastMath;
+
+/**
+ * Exponential decay function: <code>a e<sup>-x / b</sup></code>,
+ * where {@code x} is the (integer) independent variable.
+ * <br/>
+ * Class is immutable.
+ *
+ * @version $Id$
+ */
+public class ExponentialDecayFunction {
+    /** Factor {@code a}. */
+    private final double a;
+    /** Factor {@code 1 / b}. */
+    private final double oneOverB;
+
+    /**
+     * Creates an instance. It will be such that
+     * <ul>
+     *  <li>{@code a = initValue}</li>
+     *  <li>{@code b = -numCall / ln(valueAtNumCall / initValue)}</li>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e. {@link #value(long) value(0)}.
+     * @param valueAtNumCall Value of the function at {@code numCall}.
+     * @param numCall Argument for which the function returns
+     * {@code valueAtNumCall}.
+     * @throws NotStrictlyPositiveException if {@code initValue <= 0}.
+     * @throws NotStrictlyPositiveException if {@code valueAtNumCall <= 0}.
+     * @throws NumberIsTooLargeException if {@code valueAtNumCall >= initValue}.
+     * @throws NotStrictlyPositiveException if {@code numCall <= 0}.
+     */
+    public ExponentialDecayFunction(double initValue,
+                                    double valueAtNumCall,
+                                    long numCall) {
+        if (initValue <= 0) {
+            throw new NotStrictlyPositiveException(initValue);
+        }
+        if (valueAtNumCall <= 0) {
+            throw new NotStrictlyPositiveException(valueAtNumCall);
+        }
+        if (valueAtNumCall >= initValue) {
+            throw new NumberIsTooLargeException(valueAtNumCall, initValue, false);
+        }
+        if (numCall <= 0) {
+            throw new NotStrictlyPositiveException(numCall);
+        }
+
+        a = initValue;
+        oneOverB = -FastMath.log(valueAtNumCall / initValue) / numCall;
+    }
+
+    /**
+     * Computes <code>a e<sup>-numCall / b</sup></code>.
+     *
+     * @param numCall Current step of the training task.
+     * @return the value of the function at {@code numCall}.
+     */
+    public double value(long numCall) {
+        return a * FastMath.exp(-numCall * oneOverB);
+    }
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/ExponentialDecayFunction.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
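
A quick numerical check of the parameterization described above (illustrative
values): since b = -numCall / ln(valueAtNumCall / initValue), the function
passes through the two prescribed points and then keeps decaying by the same
factor every numCall steps.

    ExponentialDecayFunction decay = new ExponentialDecayFunction(1.0, 0.1, 100);
    decay.value(0);   // 1.0  (initValue)
    decay.value(100); // 0.1  (valueAtNumCall, up to floating-point rounding)
    decay.value(200); // 0.01 (same decay factor over the next 100 calls)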

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm.util;
+
+import org.apache.commons.math3.exception.NotStrictlyPositiveException;
+import org.apache.commons.math3.exception.NumberIsTooLargeException;
+import org.apache.commons.math3.analysis.function.Logistic;
+
+/**
+ * Decay function whose shape is similar to a sigmoid.
+ * <br/>
+ * Class is immutable.
+ *
+ * @version $Id$
+ */
+public class QuasiSigmoidDecayFunction {
+    /** Sigmoid. */
+    private final Logistic sigmoid;
+    /** See {@link #value(long)}. */
+    private final double scale;
+
+    /**
+     * Creates an instance.
+     * The function {@code f} will have the following properties:
+     * <ul>
+     *  <li>{@code f(0) = initValue}</li>
+     *  <li>{@code numCall} is the inflexion point</li>
+     *  <li>{@code slope = f'(numCall)}</li>
+     * </ul>
+     *
+     * @param initValue Initial value, i.e. {@link #value(long) value(0)}.
+     * @param slope Value of the function derivative at {@code numCall}.
+     * @param numCall Inflexion point.
+     * @throws NotStrictlyPositiveException if {@code initValue <= 0}.
+     * @throws NumberIsTooLargeException if {@code slope >= 0}.
+     * @throws NotStrictlyPositiveException if {@code numCall <= 0}.
+     */
+    public QuasiSigmoidDecayFunction(double initValue,
+                                     double slope,
+                                     long numCall) {
+        if (initValue <= 0) {
+            throw new NotStrictlyPositiveException(initValue);
+        }
+        if (slope >= 0) {
+            throw new NumberIsTooLargeException(slope, 0, false);
+        }
+        if (numCall <= 1) {
+            throw new NotStrictlyPositiveException(numCall);
+        }
+
+        final double k = initValue;
+        final double m = numCall;
+        final double b = 4 * slope / initValue;
+        final double q = 1;
+        final double a = 0;
+        final double n = 1;
+        sigmoid = new Logistic(k, m, b, q, a, n);
+
+        final double y0 = sigmoid.value(0);
+        scale = k / y0;
+    }
+
+    /**
+     * Computes the value of the learning factor.
+     *
+     * @param numCall Current step of the training task.
+     * @return the value of the function at {@code numCall}.
+     */
+    public double value(long numCall) {
+        return scale * sigmoid.value(numCall);
+    }
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/QuasiSigmoidDecayFunction.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
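
A short usage sketch (illustrative values). As the constructor shows, the
underlying Logistic curve is rescaled so that value(0) equals initValue
exactly; past the inflexion point the value tends towards zero.

    QuasiSigmoidDecayFunction decay
        = new QuasiSigmoidDecayFunction(1, -1e-3, 500);
    decay.value(0);    // 1.0 (initValue, by construction of "scale")
    decay.value(5000); // close to 0, well past the inflexion point at 500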

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Miscellaneous utilities.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm.util;

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/sofm/util/package-info.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,433 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.twod;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.io.Serializable;
+import java.io.ObjectInputStream;
+import org.apache.commons.math3.ml.neuralnet.Neuron;
+import org.apache.commons.math3.ml.neuralnet.Network;
+import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
+import org.apache.commons.math3.ml.neuralnet.SquareNeighbourhood;
+import org.apache.commons.math3.exception.NumberIsTooSmallException;
+import org.apache.commons.math3.exception.OutOfRangeException;
+import org.apache.commons.math3.exception.MathInternalError;
+
+/**
+ * Neural network with the topology of a two-dimensional surface.
+ * Each neuron defines one surface element.
+ * <br/>
+ * This network is primarily intended to represent a
+ * <a href="http://en.wikipedia.org/wiki/Kohonen">
+ *  Self Organizing Feature Map</a>.
+ *
+ * @see org.apache.commons.math3.ml.neuralnet.sofm
+ * @version $Id$
+ */
+public class NeuronSquareMesh2D implements Serializable {
+    /** Underlying network. */
+    private final Network network;
+    /** Number of rows. */
+    private final int numberOfRows;
+    /** Number of columns. */
+    private final int numberOfColumns;
+    /** Wrap. */
+    private final boolean wrapRows;
+    /** Wrap. */
+    private final boolean wrapColumns;
+    /** Neighbourhood type. */
+    private final SquareNeighbourhood neighbourhood;
+    /**
+     * Mapping of the 2D coordinates (in the rectangular mesh) to
+     * the neuron identifiers (attributed by the {@link #network}
+     * instance).
+     */
+    private final long[][] identifiers;
+
+    /**
+     * Constructor with restricted access, solely used for deserialization.
+     *
+     * @param wrapRowDim Whether to wrap the first dimension (i.e. the first
+     * and last neurons will be linked together).
+     * @param wrapColDim Whether to wrap the second dimension (i.e. the first
+     * and last neurons will be linked together).
+     * @param neighbourhoodType Neighbourhood type.
+     * @param featuresList Arrays that will initialize the features sets of
+     * the network's neurons.
+     * @throws NumberIsTooSmallException if {@code numRows < 2} or
+     * {@code numCols < 2}.
+     */
+    NeuronSquareMesh2D(boolean wrapRowDim,
+                       boolean wrapColDim,
+                       SquareNeighbourhood neighbourhoodType,
+                       double[][][] featuresList) {
+        numberOfRows = featuresList.length;
+        numberOfColumns = featuresList[0].length;
+
+        if (numberOfRows < 2) {
+            throw new NumberIsTooSmallException(numberOfRows, 2, true);
+        }
+        if (numberOfColumns < 2) {
+            throw new NumberIsTooSmallException(numberOfColumns, 2, true);
+        }
+
+        wrapRows = wrapRowDim;
+        wrapColumns = wrapColDim;
+        neighbourhood = neighbourhoodType;
+
+        final int fLen = featuresList[0][0].length;
+        network = new Network(0, fLen);
+        identifiers = new long[numberOfRows][numberOfColumns];
+
+        // Add neurons.
+        for (int i = 0; i < numberOfRows; i++) {
+            for (int j = 0; j < numberOfColumns; j++) {
+                identifiers[i][j] = network.createNeuron(featuresList[i][j]);
+            }
+        }
+
+        // Add links.
+        createLinks();
+    }
+
+    /**
+     * Creates a two-dimensional network composed of square cells:
+     * Each neuron not located on the border of the mesh has four
+     * neurons linked to it.
+     * <br/>
+     * The links are bi-directional.
+     * <br/>
+     * The topology of the network can also be a cylinder (if one
+     * of the dimensions is wrapped) or a torus (if both dimensions
+     * are wrapped).
+     *
+     * @param numRows Number of neurons in the first dimension.
+     * @param wrapRowDim Whether to wrap the first dimension (i.e. the first
+     * and last neurons will be linked together).
+     * @param numCols Number of neurons in the second dimension.
+     * @param wrapColDim Whether to wrap the second dimension (i.e. the first
+     * and last neurons will be linked together).
+     * @param neighbourhoodType Neighbourhood type.
+     * @param featureInit Array of functions that will initialize the
+     * corresponding element of the features set of each newly created
+     * neuron. In particular, the size of this array defines the size of
+     * feature set.
+     * @throws NumberIsTooSmallException if {@code numRows < 2} or
+     * {@code numCols < 2}.
+     */
+    public NeuronSquareMesh2D(int numRows,
+                              boolean wrapRowDim,
+                              int numCols,
+                              boolean wrapColDim,
+                              SquareNeighbourhood neighbourhoodType,
+                              FeatureInitializer[] featureInit) {
+        if (numRows < 2) {
+            throw new NumberIsTooSmallException(numRows, 2, true);
+        }
+        if (numCols < 2) {
+            throw new NumberIsTooSmallException(numCols, 2, true);
+        }
+
+        numberOfRows = numRows;
+        wrapRows = wrapRowDim;
+        numberOfColumns = numCols;
+        wrapColumns = wrapColDim;
+        neighbourhood = neighbourhoodType;
+        identifiers = new long[numberOfRows][numberOfColumns];
+
+        final int fLen = featureInit.length;
+        network = new Network(0, fLen);
+
+        // Add neurons.
+        for (int i = 0; i < numRows; i++) {
+            for (int j = 0; j < numCols; j++) {
+                final double[] features = new double[fLen];
+                for (int fIndex = 0; fIndex < fLen; fIndex++) {
+                    features[fIndex] = featureInit[fIndex].value();
+                }
+                identifiers[i][j] = network.createNeuron(features);
+            }
+        }
+
+        // Add links.
+        createLinks();
+    }
+
+    /**
+     * Retrieves the underlying network.
+     * A reference is returned (enabling, for example, the network to be
+     * trained).
+     * This also implies that calling methods that modify the {@link Network}
+     * topology may cause this class to become inconsistent.
+     *
+     * @return the network.
+     */
+    public Network getNetwork() {
+        return network;
+    }
+
+    /**
+     * Gets the number of neurons in each row of this map.
+     *
+     * @return the number of rows.
+     */
+    public int getNumberOfRows() {
+        return numberOfRows;
+    }
+
+    /**
+     * Gets the number of neurons in each column of this map.
+     *
+     * @return the number of columns.
+     */
+    public int getNumberOfColumns() {
+        return numberOfColumns;
+    }
+
+    /**
+     * Retrieves the neuron at location {@code (i, j)} in the map.
+     *
+     * @param i Row index.
+     * @param j Column index.
+     * @return the neuron at {@code (i, j)}.
+     * @throws OutOfRangeException if {@code i} or {@code j} is
+     * out of range.
+     */
+    public Neuron getNeuron(int i,
+                            int j) {
+        if (i < 0 ||
+            i >= numberOfRows) {
+            throw new OutOfRangeException(i, 0, numberOfRows - 1);
+        }
+        if (j < 0 ||
+            j >= numberOfColumns) {
+            throw new OutOfRangeException(j, 0, numberOfColumns - 1);
+        }
+
+        return network.getNeuron(identifiers[i][j]);
+    }
+
+    /**
+     * Creates the neighbour relationships between neurons.
+     */
+    private void createLinks() {
+        // "linkEnd" will store the identifiers of the "neighbours".
+        final List<Long> linkEnd = new ArrayList<Long>();
+        final int iLast = numberOfRows - 1;
+        final int jLast = numberOfColumns - 1;
+        for (int i = 0; i < numberOfRows; i++) {
+            for (int j = 0; j < numberOfColumns; j++) {
+                linkEnd.clear();
+
+                switch (neighbourhood) {
+
+                case MOORE:
+                    // Add links to "diagonal" neighbours.
+                    if (i > 0) {
+                        if (j > 0) {
+                            linkEnd.add(identifiers[i - 1][j - 1]);
+                        }
+                        if (j < jLast) {
+                            linkEnd.add(identifiers[i - 1][j + 1]);
+                        }
+                    }
+                    if (i < iLast) {
+                        if (j > 0) {
+                            linkEnd.add(identifiers[i + 1][j - 1]);
+                        }
+                        if (j < jLast) {
+                            linkEnd.add(identifiers[i + 1][j + 1]);
+                        }
+                    }
+                    if (wrapRows) {
+                        if (i == 0) {
+                            if (j > 0) {
+                                linkEnd.add(identifiers[iLast][j - 1]);
+                            }
+                            if (j < jLast) {
+                                linkEnd.add(identifiers[iLast][j + 1]);
+                            }
+                        } else if (i == iLast) {
+                            if (j > 0) {
+                                linkEnd.add(identifiers[0][j - 1]);
+                            }
+                            if (j < jLast) {
+                                linkEnd.add(identifiers[0][j + 1]);
+                            }
+                        }
+                    }
+                    if (wrapColumns) {
+                        if (j == 0) {
+                            if (i > 0) {
+                                linkEnd.add(identifiers[i - 1][jLast]);
+                            }
+                            if (i < iLast) {
+                                linkEnd.add(identifiers[i + 1][jLast]);
+                            }
+                        } else if (j == jLast) {
+                             if (i > 0) {
+                                 linkEnd.add(identifiers[i - 1][0]);
+                             }
+                             if (i < iLast) {
+                                 linkEnd.add(identifiers[i + 1][0]);
+                             }
+                        }
+                    }
+                    if (wrapRows &&
+                        wrapColumns) {
+                        if (i == 0 &&
+                            j == 0) {
+                            linkEnd.add(identifiers[iLast][jLast]);
+                        } else if (i == 0 &&
+                                   j == jLast) {
+                            linkEnd.add(identifiers[iLast][0]);
+                        } else if (i == iLast &&
+                                   j == 0) {
+                            linkEnd.add(identifiers[0][jLast]);
+                        } else if (i == iLast &&
+                                   j == jLast) {
+                            linkEnd.add(identifiers[0][0]);
+                        }
+                    }
+
+                    // Case falls through since the "Moore" neighbourhood
+                    // also contains the neurons that belong to the "Von
+                    // Neumann" neighbourhood.
+
+                    // fallthru (CheckStyle)
+                case VON_NEUMANN:
+                    // Links to preceding and following "row".
+                    if (i > 0) {
+                        linkEnd.add(identifiers[i - 1][j]);
+                    }
+                    if (i < iLast) {
+                        linkEnd.add(identifiers[i + 1][j]);
+                    }
+                    if (wrapRows) {
+                        if (i == 0) {
+                            linkEnd.add(identifiers[iLast][j]);
+                        } else if (i == iLast) {
+                            linkEnd.add(identifiers[0][j]);
+                        }
+                    }
+
+                    // Links to preceding and following "column".
+                    if (j > 0) {
+                        linkEnd.add(identifiers[i][j - 1]);
+                    }
+                    if (j < jLast) {
+                        linkEnd.add(identifiers[i][j + 1]);
+                    }
+                    if (wrapColumns) {
+                        if (j == 0) {
+                            linkEnd.add(identifiers[i][jLast]);
+                        } else if (j == jLast) {
+                            linkEnd.add(identifiers[i][0]);
+                        }
+                    }
+                    break;
+
+                default:
+                    throw new MathInternalError(); // Cannot happen.
+                }
+
+                final Neuron aNeuron = network.getNeuron(identifiers[i][j]);
+                for (long b : linkEnd) {
+                    final Neuron bNeuron = network.getNeuron(b);
+                    // Link to all neighbours.
+                    // The reverse links will be added as the loop proceeds.
+                    network.addLink(aNeuron, bNeuron);
+                }
+            }
+        }
+    }
+
+    /**
+     * Prevents proxy bypass.
+     *
+     * @param in Input stream.
+     */
+    private void readObject(ObjectInputStream in) {
+        throw new IllegalStateException();
+    }
+
+    /**
+     * Custom serialization.
+     *
+     * @return the proxy instance that will be actually serialized.
+     */
+    private Object writeReplace() {
+        final double[][][] featuresList = new double[numberOfRows][numberOfColumns][];
+        for (int i = 0; i < numberOfRows; i++) {
+            for (int j = 0; j < numberOfColumns; j++) {
+                featuresList[i][j] = getNeuron(i, j).getFeatures();
+            }
+        }
+
+        return new SerializationProxy(wrapRows,
+                                      wrapColumns,
+                                      neighbourhood,
+                                      featuresList);
+    }
+
+    /**
+     * Serialization.
+     */
+    private static class SerializationProxy implements Serializable {
+        /** Serializable. */
+        private static final long serialVersionUID = 20130226L;
+        /** Wrap. */
+        private final boolean wrapRows;
+        /** Wrap. */
+        private final boolean wrapColumns;
+        /** Neighbourhood type. */
+        private final SquareNeighbourhood neighbourhood;
+        /** Neurons' features. */
+        private final double[][][] featuresList;
+
+        /**
+         * @param wrapRows Whether the row dimension is wrapped.
+         * @param wrapColumns Whether the column dimension is wrapped.
+         * @param neighbourhood Neighbourhood type.
+         * @param featuresList List of the neurons' features.
+         */
+        SerializationProxy(boolean wrapRows,
+                           boolean wrapColumns,
+                           SquareNeighbourhood neighbourhood,
+                           double[][][] featuresList) {
+            this.wrapRows = wrapRows;
+            this.wrapColumns = wrapColumns;
+            this.neighbourhood = neighbourhood;
+            this.featuresList = featuresList;
+        }
+
+        /**
+         * Custom serialization.
+         *
+         * @return the {@link NeuronSquareMesh2D} for which this instance is the proxy.
+         */
+        private Object readResolve() {
+            return new NeuronSquareMesh2D(wrapRows,
+                                          wrapColumns,
+                                          neighbourhood,
+                                          featuresList);
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/NeuronSquareMesh2D.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
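
A brief construction sketch for the map added above (dimensions, wrapping and
initializer range are illustrative; the unit tests added below in this commit
build networks the same way):

    // 5x4 map of neurons, 2 features per neuron, uniformly initialized in
    // [0, 1]; no wrapping, "Von Neumann" (4-neighbour) topology.
    FeatureInitializer init = FeatureInitializerFactory.uniform(0, 1);
    FeatureInitializer[] initArray = { init, init };
    NeuronSquareMesh2D map = new NeuronSquareMesh2D(5, false,
                                                    4, false,
                                                    SquareNeighbourhood.VON_NEUMANN,
                                                    initArray);
    Network net = map.getNetwork();       // reference usable for training
    Neuron topLeft = map.getNeuron(0, 0); // neuron at row 0, column 0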

Added: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java (added)
+++ commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Two-dimensional neural networks.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.twod;

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/main/java/org/apache/commons/math3/ml/neuralnet/twod/package-info.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet;
+
+import java.util.Set;
+import java.util.HashSet;
+import org.apache.commons.math3.ml.distance.DistanceMeasure;
+import org.apache.commons.math3.ml.distance.EuclideanDistance;
+import org.apache.commons.math3.ml.neuralnet.oned.NeuronString;
+import org.junit.Test;
+import org.junit.Assert;
+
+/**
+ * Tests for {@link MapUtils} class.
+ */
+public class MapUtilsTest {
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1-----2
+     */
+    @Test
+    public void testFindClosestNeuron() {
+        final FeatureInitializer init
+            = new OffsetFeatureInitializer(FeatureInitializerFactory.uniform(-0.1, 0.1));
+        final FeatureInitializer[] initArray = { init };
+
+        final Network net = new NeuronString(3, false, initArray).getNetwork();
+        final DistanceMeasure dist = new EuclideanDistance();
+
+        final Set<Neuron> allBest = new HashSet<Neuron>();
+        final Set<Neuron> best = new HashSet<Neuron>();
+        double[][] features;
+
+        // The following tests ensure that
+        // 1. the same neuron is always selected when the input feature is
+        //    in the range of the initializer,
+        // 2. different neurons of the network are selected by input features
+        //    that belong to different ranges.
+
+        best.clear();
+        features = new double[][] {
+            { -1 },
+            { 0.4 },
+        };
+        for (double[] f : features) {
+            best.add(MapUtils.findBest(f, net, dist));
+        }
+        Assert.assertEquals(1, best.size());
+        allBest.addAll(best);
+
+        best.clear();
+        features = new double[][] {
+            { 0.6 },
+            { 1.4 },
+        };
+        for (double[] f : features) {
+            best.add(MapUtils.findBest(f, net, dist));
+        }
+        Assert.assertEquals(1, best.size());
+        allBest.addAll(best);
+
+        best.clear();
+        features = new double[][] {
+            { 1.6 },
+            { 3 },
+        };
+        for (double[] f : features) {
+            best.add(MapUtils.findBest(f, net, dist));
+        }
+        Assert.assertEquals(1, best.size());
+        allBest.addAll(best);
+
+        Assert.assertEquals(3, allBest.size());
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/MapUtilsTest.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet;
+
+import java.io.ByteArrayOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Collection;
+import java.util.NoSuchElementException;
+import org.junit.Test;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.apache.commons.math3.exception.NumberIsTooSmallException;
+import org.apache.commons.math3.ml.neuralnet.twod.NeuronSquareMesh2D;
+import org.apache.commons.math3.random.Well44497b;
+
+/**
+ * Tests for {@link Network}.
+ */
+public class NetworkTest {
+    final FeatureInitializer init = FeatureInitializerFactory.uniform(0, 2);
+
+    @Test
+    public void testGetFeaturesSize() {
+        final FeatureInitializer[] initArray = { init, init, init };
+
+        final Network net = new NeuronSquareMesh2D(2, false,
+                                                   2, false,
+                                                   SquareNeighbourhood.VON_NEUMANN,
+                                                   initArray).getNetwork();
+        Assert.assertEquals(3, net.getFeaturesSize());
+    }
+
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1
+     *  |     |
+     *  |     |
+     *  2-----3
+     */
+    @Test
+    public void testDeleteLink() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronSquareMesh2D(2, false,
+                                                   2, false,
+                                                   SquareNeighbourhood.VON_NEUMANN,
+                                                   initArray).getNetwork();
+        Collection<Neuron> neighbours;
+
+        // Delete 0 --> 1.
+        net.deleteLink(net.getNeuron(0),
+                       net.getNeuron(1));
+
+        // Link from 0 to 1 was deleted.
+        neighbours = net.getNeighbours(net.getNeuron(0));
+        Assert.assertFalse(neighbours.contains(net.getNeuron(1)));
+        // Link from 1 to 0 still exists.
+        neighbours = net.getNeighbours(net.getNeuron(1));
+        Assert.assertTrue(neighbours.contains(net.getNeuron(0)));
+    }
+
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1
+     *  |     |
+     *  |     |
+     *  2-----3
+     */
+    @Test
+    public void testDeleteNeuron() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronSquareMesh2D(2, false,
+                                                   2, false,
+                                                   SquareNeighbourhood.VON_NEUMANN,
+                                                   initArray).getNetwork();
+
+        Assert.assertEquals(2, net.getNeighbours(net.getNeuron(0)).size());
+        Assert.assertEquals(2, net.getNeighbours(net.getNeuron(3)).size());
+
+        // Delete neuron 1.
+        net.deleteNeuron(net.getNeuron(1));
+
+        try {
+            net.getNeuron(1);
+            Assert.fail("Neuron 1 should have been deleted.");
+        } catch (NoSuchElementException expected) {}
+
+        Assert.assertEquals(1, net.getNeighbours(net.getNeuron(0)).size());
+        Assert.assertEquals(1, net.getNeighbours(net.getNeuron(3)).size());
+    }
+
+    @Test
+    public void testIterationOrder() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronSquareMesh2D(4, false,
+                                                   3, true,
+                                                   SquareNeighbourhood.VON_NEUMANN,
+                                                   initArray).getNetwork();
+
+        boolean isUnspecifiedOrder = false;
+
+        // Check that the default iterator returns the neurons
+        // in an unspecified order.
+        long previousId = Long.MIN_VALUE;
+        for (Neuron n : net) {
+            final long currentId = n.getIdentifier();
+            if (currentId < previousId) {
+                isUnspecifiedOrder = true;
+                break;
+            }
+            previousId = currentId;
+        }
+        Assert.assertTrue(isUnspecifiedOrder);
+
+        // Check that the comparator provides a specific order.
+        isUnspecifiedOrder = false;
+        previousId = Long.MIN_VALUE;
+        for (Neuron n : net.getNeurons(new Network.NeuronIdentifierComparator())) {
+            final long currentId = n.getIdentifier();
+            if (currentId < previousId) {
+                isUnspecifiedOrder = true;
+                break;
+            }
+            previousId = currentId;
+        }
+        Assert.assertFalse(isUnspecifiedOrder);
+    }
+
+    @Test
+    public void testSerialize()
+        throws IOException,
+               ClassNotFoundException {
+        final FeatureInitializer[] initArray = { init };
+        final Network out = new NeuronSquareMesh2D(4, false,
+                                                   3, true,
+                                                   SquareNeighbourhood.VON_NEUMANN,
+                                                   initArray).getNetwork();
+
+        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        final ObjectOutputStream oos = new ObjectOutputStream(bos);
+        oos.writeObject(out);
+
+        final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
+        final ObjectInputStream ois = new ObjectInputStream(bis);
+        final Network in = (Network) ois.readObject();
+
+        for (Neuron nOut : out) {
+            final Neuron nIn = in.getNeuron(nOut.getIdentifier());
+
+            // Same values.
+            final double[] outF = nOut.getFeatures();
+            final double[] inF = nIn.getFeatures();
+            Assert.assertEquals(outF.length, inF.length);
+            for (int i = 0; i < outF.length; i++) {
+                Assert.assertEquals(outF[i], inF[i], 0d);
+            }
+
+            // Same neighbours.
+            final Collection<Neuron> outNeighbours = out.getNeighbours(nOut);
+            final Collection<Neuron> inNeighbours = in.getNeighbours(nIn);
+            Assert.assertEquals(outNeighbours.size(), inNeighbours.size());
+            for (Neuron oN : outNeighbours) {
+                Assert.assertTrue(inNeighbours.contains(in.getNeuron(oN.getIdentifier())));
+            }
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NetworkTest.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
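
As the assertions in testDeleteLink above show, Network.deleteLink removes only the directed link that is passed to it; the reverse link remains until it is deleted separately. A minimal sketch of disconnecting two neurons in both directions, using only the constructor and Network calls exercised by NetworkTest (the SquareNeighbourhood import path is assumed from this commit's package layout):

    import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
    import org.apache.commons.math3.ml.neuralnet.FeatureInitializerFactory;
    import org.apache.commons.math3.ml.neuralnet.Network;
    import org.apache.commons.math3.ml.neuralnet.Neuron;
    import org.apache.commons.math3.ml.neuralnet.SquareNeighbourhood;
    import org.apache.commons.math3.ml.neuralnet.twod.NeuronSquareMesh2D;

    public class DeleteBothLinksSketch {
        public static void main(String[] args) {
            final FeatureInitializer[] initArray = { FeatureInitializerFactory.uniform(0, 2) };
            final Network net = new NeuronSquareMesh2D(2, false,
                                                       2, false,
                                                       SquareNeighbourhood.VON_NEUMANN,
                                                       initArray).getNetwork();
            final Neuron a = net.getNeuron(0);
            final Neuron b = net.getNeuron(1);

            // Each call removes a single directed link; two calls are needed
            // to disconnect the pair completely.
            net.deleteLink(a, b);
            net.deleteLink(b, a);

            System.out.println(net.getNeighbours(a).contains(b)); // expected: false
            System.out.println(net.getNeighbours(b).contains(a)); // expected: false
        }
    }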

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet;
+
+import java.io.ByteArrayOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectInputStream;
+import java.io.IOException;
+import org.junit.Test;
+import org.junit.Assert;
+
+/**
+ * Tests for {@link Neuron}.
+ */
+public class NeuronTest {
+    @Test
+    public void testGetIdentifier() {
+        final long id = 1234567;
+        final Neuron n = new Neuron(id, new double[] { 0 });
+
+        Assert.assertEquals(id, n.getIdentifier());
+    }
+
+    @Test
+    public void testGetSize() {
+        final double[] features = { -1, -1e-97, 0, 23.456, 9.01e203 } ;
+        final Neuron n = new Neuron(1, features);
+        Assert.assertEquals(features.length, n.getSize());
+    }
+
+    @Test
+    public void testGetFeatures() {
+        final double[] features = { -1, -1e-97, 0, 23.456, 9.01e203 } ;
+        final Neuron n = new Neuron(1, features);
+
+        final double[] f = n.getFeatures();
+        // Accessor returns a copy.
+        Assert.assertFalse(f == features);
+
+        // Values are the same.
+        Assert.assertEquals(features.length, f.length);
+        for (int i = 0; i < features.length; i++) {
+            Assert.assertEquals(features[i], f[i], 0d);
+        }
+    }
+
+    @Test
+    public void testCompareAndSetFeatures() {
+        final Neuron n = new Neuron(1, new double[] { 0 });
+        double[] expect = n.getFeatures();
+        double[] update = new double[] { expect[0] + 1.23 };
+
+        // Test "success".
+        boolean ok = n.compareAndSetFeatures(expect, update);
+        // Check that the update is reported as successful.
+        Assert.assertTrue(ok);
+        // Check that the new value is correct.
+        Assert.assertEquals(update[0],  n.getFeatures()[0], 0d);
+
+        // Test "failure".
+        double[] update1 = new double[] { update[0] + 4.56 };
+        // Must return "false" because the neuron has been
+        // updated: a new update can only succeed if "expect"
+        // is set to the new features.
+        ok = n.compareAndSetFeatures(expect, update1);
+        // Check that the update is reported as failed.
+        Assert.assertFalse(ok);
+        // Check that the value was not changed.
+        Assert.assertEquals(update[0],  n.getFeatures()[0], 0d);
+    }
+
+    @Test
+    public void testSerialize()
+        throws IOException,
+               ClassNotFoundException {
+        final Neuron out = new Neuron(123, new double[] { -98.76, -1, 0, 1e-23, 543.21, 1e234 });
+        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        final ObjectOutputStream oos = new ObjectOutputStream(bos);
+        oos.writeObject(out);
+
+        final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
+        final ObjectInputStream ois = new ObjectInputStream(bis);
+        final Neuron in = (Neuron) ois.readObject();
+
+        // Same identifier.
+        Assert.assertEquals(out.getIdentifier(),
+                            in.getIdentifier());
+        // Same values.
+        final double[] outF = out.getFeatures();
+        final double[] inF = in.getFeatures();
+        Assert.assertEquals(outF.length, inF.length);
+        for (int i = 0; i < outF.length; i++) {
+            Assert.assertEquals(outF[i], inF[i], 0d);
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/NeuronTest.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
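
testCompareAndSetFeatures above illustrates that an update succeeds only while the expected array still matches the stored features. A minimal sketch of the usual compare-and-set retry loop built on the two accessors exercised by the test (getFeatures and compareAndSetFeatures); the additive update and the helper name are illustrative only:

    import java.util.Arrays;
    import org.apache.commons.math3.ml.neuralnet.Neuron;

    public class NeuronCasSketch {
        /** Retries until the additive update is applied without a concurrent change. */
        static void addToFeatures(Neuron n, double delta) {
            while (true) {
                final double[] expect = n.getFeatures(); // snapshot (accessor returns a copy).
                final double[] update = expect.clone();
                for (int i = 0; i < update.length; i++) {
                    update[i] += delta;
                }
                // Succeeds only if the stored features still match the snapshot.
                if (n.compareAndSetFeatures(expect, update)) {
                    return;
                }
            }
        }

        public static void main(String[] args) {
            final Neuron n = new Neuron(1, new double[] { 0, 0 });
            addToFeatures(n, 1.5);
            System.out.println(Arrays.toString(n.getFeatures())); // [1.5, 1.5]
        }
    }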

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet;
+
+import org.junit.Test;
+import org.junit.Assert;
+import org.apache.commons.math3.random.RandomGenerator;
+import org.apache.commons.math3.random.Well44497b;
+
+/**
+ * Wraps a given initializer.
+ */
+public class OffsetFeatureInitializer
+    implements FeatureInitializer {
+    /** Wrapped initializer. */
+    private final FeatureInitializer orig;
+    /** Offset. */
+    private int inc = 0;
+
+    /**
+     * Creates a new initializer whose {@link #value()} method
+     * will return {@code orig.value() + offset}, where
+     * {@code offset} is automatically incremented by one at
+     * each call.
+     *
+     * @param orig Original initializer.
+     */
+    public OffsetFeatureInitializer(FeatureInitializer orig) {
+        this.orig = orig;
+    }
+
+    /** {@inheritDoc} */
+    public double value() {
+        return orig.value() + inc++;
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/OffsetFeatureInitializer.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
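
A minimal usage sketch for the wrapper above: because the offset grows by one at each call, successive values are strictly increasing even when the wrapped initializer draws from a narrow range. Only FeatureInitializerFactory.uniform, already used by the other tests in this commit, is assumed:

    import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
    import org.apache.commons.math3.ml.neuralnet.FeatureInitializerFactory;
    import org.apache.commons.math3.ml.neuralnet.OffsetFeatureInitializer;

    public class OffsetFeatureInitializerSketch {
        public static void main(String[] args) {
            final FeatureInitializer init
                = new OffsetFeatureInitializer(FeatureInitializerFactory.uniform(0, 0.1));

            // First call: uniform draw + 0, second call: + 1, third call: + 2, ...
            for (int i = 0; i < 5; i++) {
                System.out.println(init.value());
            }
        }
    }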

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.oned;
+
+import java.io.ByteArrayOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Collection;
+import org.junit.Test;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.apache.commons.math3.exception.NumberIsTooSmallException;
+import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
+import org.apache.commons.math3.ml.neuralnet.FeatureInitializerFactory;
+import org.apache.commons.math3.ml.neuralnet.Network;
+import org.apache.commons.math3.ml.neuralnet.Neuron;
+import org.apache.commons.math3.random.Well44497b;
+
+/**
+ * Tests for {@link NeuronString} and {@link Network} functionality for
+ * a one-dimensional network.
+ */
+public class NeuronStringTest {
+    final FeatureInitializer init = FeatureInitializerFactory.uniform(0, 2);
+
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1-----2-----3
+     */
+    @Test
+    public void testSegmentNetwork() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronString(4, false, initArray).getNetwork();
+
+        Collection<Neuron> neighbours;
+
+        // Neuron 0.
+        neighbours = net.getNeighbours(net.getNeuron(0));
+        for (long nId : new long[] { 1 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(1, neighbours.size());
+
+        // Neuron 1.
+        neighbours = net.getNeighbours(net.getNeuron(1));
+        for (long nId : new long[] { 0, 2 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+
+        // Neuron 2.
+        neighbours = net.getNeighbours(net.getNeuron(2));
+        for (long nId : new long[] { 1, 3 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+
+        // Neuron 3.
+        neighbours = net.getNeighbours(net.getNeuron(3));
+        for (long nId : new long[] { 2 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(1, neighbours.size());
+    }
+
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1-----2-----3
+     */
+    @Test
+    public void testCircleNetwork() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronString(4, true, initArray).getNetwork();
+
+        Collection<Neuron> neighbours;
+
+        // Neuron 0.
+        neighbours = net.getNeighbours(net.getNeuron(0));
+        for (long nId : new long[] { 1, 3 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+
+        // Neuron 1.
+        neighbours = net.getNeighbours(net.getNeuron(1));
+        for (long nId : new long[] { 0, 2 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+
+        // Neuron 2.
+        neighbours = net.getNeighbours(net.getNeuron(2));
+        for (long nId : new long[] { 1, 3 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+
+        // Neuron 3.
+        neighbours = net.getNeighbours(net.getNeuron(3));
+        for (long nId : new long[] { 0, 2 }) {
+            Assert.assertTrue(neighbours.contains(net.getNeuron(nId)));
+        }
+        // Ensures that no other neuron is in the neighbourhood set.
+        Assert.assertEquals(2, neighbours.size());
+    }
+
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1-----2-----3-----4
+     */
+    @Test
+    public void testGetNeighboursWithExclude() {
+        final FeatureInitializer[] initArray = { init };
+        final Network net = new NeuronString(5, true, initArray).getNetwork();
+        final Collection<Neuron> exclude = new ArrayList<Neuron>();
+        exclude.add(net.getNeuron(1));
+        final Collection<Neuron> neighbours = net.getNeighbours(net.getNeuron(0),
+                                                                exclude);
+        Assert.assertTrue(neighbours.contains(net.getNeuron(4)));
+        Assert.assertEquals(1, neighbours.size());
+    }
+
+    @Test
+    public void testSerialize()
+        throws IOException,
+               ClassNotFoundException {
+        final FeatureInitializer[] initArray = { init };
+        final NeuronString out = new NeuronString(4, false, initArray);
+
+        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        final ObjectOutputStream oos = new ObjectOutputStream(bos);
+        oos.writeObject(out);
+
+        final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
+        final ObjectInputStream ois = new ObjectInputStream(bis);
+        final NeuronString in = (NeuronString) ois.readObject();
+
+        for (Neuron nOut : out.getNetwork()) {
+            final Neuron nIn = in.getNetwork().getNeuron(nOut.getIdentifier());
+
+            // Same values.
+            final double[] outF = nOut.getFeatures();
+            final double[] inF = nIn.getFeatures();
+            Assert.assertEquals(outF.length, inF.length);
+            for (int i = 0; i < outF.length; i++) {
+                Assert.assertEquals(outF[i], inF[i], 0d);
+            }
+
+            // Same neighbours.
+            final Collection<Neuron> outNeighbours = out.getNetwork().getNeighbours(nOut);
+            final Collection<Neuron> inNeighbours = in.getNetwork().getNeighbours(nIn);
+            Assert.assertEquals(outNeighbours.size(), inNeighbours.size());
+            for (Neuron oN : outNeighbours) {
+                Assert.assertTrue(inNeighbours.contains(in.getNetwork().getNeuron(oN.getIdentifier())));
+            }
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/oned/NeuronStringTest.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
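
The circular-network test above spells out the neighbour sets for a 4-neuron ring. A sketch of the same check for an arbitrary ring size, assuming (as the tests above do) that neuron identifiers run from 0 to n - 1 in creation order:

    import java.util.Collection;
    import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
    import org.apache.commons.math3.ml.neuralnet.FeatureInitializerFactory;
    import org.apache.commons.math3.ml.neuralnet.Network;
    import org.apache.commons.math3.ml.neuralnet.Neuron;
    import org.apache.commons.math3.ml.neuralnet.oned.NeuronString;

    public class RingNeighbourSketch {
        public static void main(String[] args) {
            final int n = 7;
            final FeatureInitializer[] initArray = { FeatureInitializerFactory.uniform(0, 2) };
            final Network net = new NeuronString(n, true, initArray).getNetwork();

            for (int i = 0; i < n; i++) {
                final Collection<Neuron> neighbours = net.getNeighbours(net.getNeuron(i));
                // In a ring, each neuron should be linked to its two cyclic neighbours.
                final boolean ok = neighbours.size() == 2
                    && neighbours.contains(net.getNeuron((i + 1) % n))
                    && neighbours.contains(net.getNeuron((i - 1 + n) % n));
                System.out.println("neuron " + i + ": " + ok);
            }
        }
    }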

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;
+
+import java.util.Set;
+import java.util.HashSet;
+import java.util.Collection;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.ExecutionException;
+import java.io.PrintWriter;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.Assert;
+import org.junit.runner.RunWith;
+import org.apache.commons.math3.RetryRunner;
+import org.apache.commons.math3.Retry;
+import org.apache.commons.math3.util.FastMath;
+import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
+
+/**
+ * Tests for {@link KohonenTrainingTask}.
+ */
+@RunWith(RetryRunner.class)
+public class KohonenTrainingTaskTest {
+    @Test
+    public void testTravellerSalesmanSquareTourSequentialSolver() {
+        // Cities (in optimal travel order).
+        final City[] squareOfCities = new City[] {
+            new City("o0", 0, 0),
+            new City("o1", 1, 0),
+            new City("o2", 2, 0),
+            new City("o3", 3, 0),
+            new City("o4", 3, 1),
+            new City("o5", 3, 2),
+            new City("o6", 3, 3),
+            new City("o7", 2, 3),
+            new City("o8", 1, 3),
+            new City("o9", 0, 3),
+            new City("i3", 1, 2),
+            new City("i2", 2, 2),
+            new City("i1", 2, 1),
+            new City("i0", 1, 1),
+        };
+
+        final TravellingSalesmanSolver solver = new TravellingSalesmanSolver(squareOfCities, 2);
+        // printSummary("before.travel.seq.dat", solver);
+        solver.createSequentialTask(15000).run();
+        // printSummary("after.travel.seq.dat", solver);
+        final City[] result = solver.getCityList();
+        Assert.assertEquals(squareOfCities.length,
+                            uniqueCities(result).size());
+        final double ratio = computeTravelDistance(squareOfCities) / computeTravelDistance(result);
+        Assert.assertEquals(1, ratio, 1e-1); // We do not require the optimal tour.
+    }
+
+    // Test can sometimes fail: Run several times.
+    @Test
+    @Retry
+    public void testTravellerSalesmanSquareTourParallelSolver() throws ExecutionException {
+        // Cities (in optimal travel order).
+        final City[] squareOfCities = new City[] {
+            new City("o0", 0, 0),
+            new City("o1", 1, 0),
+            new City("o2", 2, 0),
+            new City("o3", 3, 0),
+            new City("o4", 3, 1),
+            new City("o5", 3, 2),
+            new City("o6", 3, 3),
+            new City("o7", 2, 3),
+            new City("o8", 1, 3),
+            new City("o9", 0, 3),
+            new City("i3", 1, 2),
+            new City("i2", 2, 2),
+            new City("i1", 2, 1),
+            new City("i0", 1, 1),
+        };
+
+        final TravellingSalesmanSolver solver = new TravellingSalesmanSolver(squareOfCities, 2);
+        // printSummary("before.travel.par.dat", solver);
+
+        // Parallel execution.
+        final ExecutorService service = Executors.newCachedThreadPool();
+        final Runnable[] tasks = solver.createParallelTasks(3, 5000);
+        final List<Future<?>> execOutput = new ArrayList<Future<?>>();
+        // Run tasks.
+        for (Runnable r : tasks) {
+            execOutput.add(service.submit(r));
+        }
+        // Wait for completion (ignoring return value).
+        try {
+            for (Future<?> f : execOutput) {
+                f.get();
+            }
+        } catch (InterruptedException ignored) {}
+        // Terminate all threads.
+        service.shutdown();
+
+        // printSummary("after.travel.par.dat", solver);
+        final City[] result = solver.getCityList();
+        Assert.assertEquals(squareOfCities.length,
+                            uniqueCities(result).size());
+        final double ratio = computeTravelDistance(squareOfCities) / computeTravelDistance(result);
+        Assert.assertEquals(1, ratio, 1e-1); // We do not require the optimal tour.
+    }
+
+    /**
+     * Creates a map of the travel suggested by the solver.
+     *
+     * @param solver Solver.
+     * @return a 4-column table: {@code <x (neuron)> <y (neuron)> <x (city)> <y (city)>}.
+     */
+    private String travelCoordinatesTable(TravellingSalesmanSolver solver) {
+        final StringBuilder s = new StringBuilder();
+        for (double[] c : solver.getCoordinatesList()) {
+            s.append(c[0]).append(" ").append(c[1]).append(" ");
+            final City city = solver.getClosestCity(c[0], c[1]);
+            final double[] cityCoord = city.getCoordinates(); 
+            s.append(cityCoord[0]).append(" ").append(cityCoord[1]).append(" ");
+            s.append("   # ").append(city.getName()).append("\n");
+        }
+        return s.toString();
+    }
+
+    /**
+     * Collects the distinct cities present in the given list
+     * (duplicates are counted only once).
+     *
+     * @param cityList List of cities visited during the travel.
+     * @return the set of unique cities.
+     */
+    private Collection<City> uniqueCities(City[] cityList) {
+        final Set<City> unique = new HashSet<City>();
+        for (City c : cityList) {
+            unique.add(c);
+        }
+        return unique;
+    }
+
+    /**
+     * Computes the distance covered by the salesman, including
+     * the trip back (from the last city to the first).
+     *
+     * @param cityList List of cities visited during the travel.
+     * @return the total distance.
+     */
+    private double computeTravelDistance(City[] cityList) {
+        double dist = 0;
+        for (int i = 0; i < cityList.length; i++) {
+            final double[] currentCoord = cityList[i].getCoordinates();
+            final double[] nextCoord = cityList[(i + 1) % cityList.length].getCoordinates();
+
+            final double xDiff = currentCoord[0] - nextCoord[0];
+            final double yDiff = currentCoord[1] - nextCoord[1];
+
+            dist += FastMath.sqrt(xDiff * xDiff + yDiff * yDiff);
+        }
+
+        return dist;
+    }
+
+    /**
+     * Prints a summary of the current state of the solver to the
+     * given filename.
+     *
+     * @param filename File.
+     * @param solver Solver.
+     */
+    private void printSummary(String filename,
+                              TravellingSalesmanSolver solver) {
+        PrintWriter out = null;
+        try {
+            out = new PrintWriter(filename);
+            out.println(travelCoordinatesTable(solver));
+
+            final City[] result = solver.getCityList();
+            out.println("# Number of unique cities: " + uniqueCities(result).size());
+            out.println("# Travel distance: " + computeTravelDistance(result));
+        } catch (Exception e) {
+            // Do nothing.
+        } finally {
+            if (out != null) {
+                out.close();
+            }
+        }
+    }
+}

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenTrainingTaskTest.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision
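
For reference on the tolerance used above: assuming the listed square tour is indeed the shortest one, its length is twelve unit steps plus the two diagonal hops o9 -> i3 and i0 -> o0, i.e. 12 + 2*sqrt(2), roughly 14.83. Since ratio = (optimal distance) / (found distance), assertEquals(1, ratio, 1e-1) accepts any valid tour whose length is at most about 16.5 (a ratio of at least 0.9).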

Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenUpdateActionTest.java
URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenUpdateActionTest.java?rev=1557267&view=auto
==============================================================================
--- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenUpdateActionTest.java (added)
+++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/ml/neuralnet/sofm/KohonenUpdateActionTest.java Fri Jan 10 22:01:27 2014
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.commons.math3.ml.neuralnet.sofm;
+
+import org.apache.commons.math3.ml.neuralnet.Neuron;
+import org.apache.commons.math3.ml.neuralnet.Network;
+import org.apache.commons.math3.ml.neuralnet.MapUtils;
+import org.apache.commons.math3.ml.neuralnet.UpdateAction;
+import org.apache.commons.math3.ml.neuralnet.OffsetFeatureInitializer;
+import org.apache.commons.math3.ml.neuralnet.FeatureInitializer;
+import org.apache.commons.math3.ml.neuralnet.FeatureInitializerFactory;
+import org.apache.commons.math3.ml.distance.DistanceMeasure;
+import org.apache.commons.math3.ml.distance.EuclideanDistance;
+import org.apache.commons.math3.ml.neuralnet.oned.NeuronString;
+import org.junit.Test;
+import org.junit.Assert;
+
+/**
+ * Tests for {@link KohonenUpdateAction} class.
+ */
+public class KohonenUpdateActionTest {
+    /*
+     * Test assumes that the network is
+     *
+     *  0-----1-----2
+     */
+    @Test
+    public void testUpdate() {
+        final FeatureInitializer init
+            = new OffsetFeatureInitializer(FeatureInitializerFactory.uniform(0, 0.1));
+        final FeatureInitializer[] initArray = { init };
+
+        final int netSize = 3;
+        final Network net = new NeuronString(netSize, false, initArray).getNetwork();
+        final DistanceMeasure dist = new EuclideanDistance();
+        final LearningFactorFunction learning
+            = LearningFactorFunctionFactory.exponentialDecay(1, 0.1, 100);
+        final NeighbourhoodSizeFunction neighbourhood
+            = NeighbourhoodSizeFunctionFactory.exponentialDecay(3, 1, 100);
+        final UpdateAction update = new KohonenUpdateAction(dist, learning, neighbourhood);
+
+        // The following test ensures that, after one "update",
+        // 1. when the initial learning rate equal to 1, the best matching
+        //    neuron's features are mapped to the input's features,
+        // 2. when the initial neighbourhood is larger than the network's size,
+        //    all neuron's features get closer to the input's features.
+
+        final double[] features = new double[] { 0.3 };
+        final double[] distancesBefore = new double[netSize];
+        int count = 0;
+        for (Neuron n : net) {
+            distancesBefore[count++] = dist.compute(n.getFeatures(), features);
+        }
+        final Neuron bestBefore = MapUtils.findBest(features, net, dist);
+
+        // Initial distance from the best match is larger than zero.
+        Assert.assertTrue(dist.compute(bestBefore.getFeatures(), features) >= 0.2 * 0.2);
+
+        update.update(net, features);
+
+        final double[] distancesAfter = new double[netSize];
+        count = 0;
+        for (Neuron n : net) {
+            distancesAfter[count++] = dist.compute(n.getFeatures(), features);
+        }
+        final Neuron bestAfter = MapUtils.findBest(features, net, dist);
+
+        Assert.assertEquals(bestBefore, bestAfter);
+        // Distance is now zero.
+        Assert.assertEquals(0, dist.compute(bestAfter.getFeatures(), features), 0d);
+
+        for (int i = 0; i < netSize; i++) {
+            // All distances have decreased.
+            Assert.assertTrue(distancesAfter[i] < distancesBefore[i]);
+        }
+    }
+}
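
The comment block in testUpdate above describes the expected effect of a single update. The usual self-organising-map rule moves a neuron's features w towards the sample x as w <- w + alpha * h * (x - w), where alpha is the learning factor and h the neighbourhood weight relative to the best-matching unit; with alpha * h = 1 the winner lands exactly on the sample, and any neuron with 0 < alpha * h < 1 moves strictly closer to it. A standalone arithmetic sketch of that step (not the KohonenUpdateAction internals, whose exact neighbourhood weighting is not shown in this part of the commit):

    public class KohonenStepSketch {
        /** One step: move "features" a fraction (alpha * h) of the way towards "sample". */
        static double[] step(double[] features, double[] sample, double alpha, double h) {
            final double[] updated = new double[features.length];
            for (int i = 0; i < features.length; i++) {
                updated[i] = features[i] + alpha * h * (sample[i] - features[i]);
            }
            return updated;
        }

        public static void main(String[] args) {
            final double[] sample = { 0.3 };

            // Winner with alpha * h = 1: mapped exactly onto the sample.
            System.out.println(step(new double[] { 0.05 }, sample, 1, 1)[0]); // 0.3

            // Any neuron with 0 < alpha * h < 1: strictly closer to the sample.
            System.out.println(step(new double[] { 1.05 }, sample, 1, 0.5)[0]); // 0.675
        }
    }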