You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@labs.apache.org by to...@apache.org on 2012/12/12 08:37:57 UTC
svn commit: r1420552 - in /labs/yay/trunk/core/src:
main/java/org/apache/yay/ test/java/org/apache/yay/
Author: tommaso
Date: Wed Dec 12 07:37:53 2012
New Revision: 1420552
URL: http://svn.apache.org/viewvc?rev=1420552&view=rev
Log:
added a test for the sigmoid function, plus a test for a sample unknown neural network, and minor fixes (comments, javadoc)
Added:
labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java (with props)
Modified:
labs/yay/trunk/core/src/main/java/org/apache/yay/ActivationFunction.java
labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java
labs/yay/trunk/core/src/main/java/org/apache/yay/FeedForwardStrategy.java
labs/yay/trunk/core/src/main/java/org/apache/yay/SigmoidFunction.java
labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/ActivationFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/ActivationFunction.java?rev=1420552&r1=1420551&r2=1420552&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/ActivationFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/ActivationFunction.java Wed Dec 12 07:37:53 2012
@@ -19,7 +19,10 @@
package org.apache.yay;
/**
- * An activation function receives a signal and generates a new signal AF : S -> S
+ * An activation function AF : S -> S receives a signal and generates a new signal.
+ * An activation function AF has horizontal asymptotes at 0 and 1 and a
+ * non-decreasing first derivative AF' with AF and AF' both being computable.
+ *
*/
public interface ActivationFunction<T> {
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java?rev=1420552&r1=1420551&r2=1420552&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java Wed Dec 12 07:37:53 2012
@@ -33,9 +33,11 @@ import org.apache.yay.utils.ConversionUt
public class BackPropagationLearningStrategy implements LearningStrategy<Double, Double[]> {
private final PredictionStrategy<Double, Double[]> predictionStrategy;
+ private CostFunction<WeightsMatrix, Double> costFunction;
- public BackPropagationLearningStrategy(PredictionStrategy<Double, Double[]> predictionStrategy) {
+ public BackPropagationLearningStrategy(PredictionStrategy<Double, Double[]> predictionStrategy, CostFunction<WeightsMatrix, Double> costFunction) {
this.predictionStrategy = predictionStrategy;
+ this.costFunction = costFunction;
}
@Override
@@ -56,10 +58,10 @@ public class BackPropagationLearningStra
RealVector error = predictedOutputVector.subtract(learnedOutputRealVector); // final layer error vector
activationErrors[activationErrors.length - 1] = new Array2DRowRealMatrix(error.toArray());
- RealVector nextLayerDelta = error;
+ RealVector nextLayerDelta = new ArrayRealVector(error);
// back prop the error and update the activationErrors accordingly
- // TODO : remove the byas term from the error calculations
+ // TODO : remove the bias term from the error calculations
for (int l = weightsMatrixSet.length - 2; l > 0; l--) {
WeightsMatrix currentMatrix = weightsMatrixSet[l];
ArrayRealVector realVector = new ArrayRealVector(output.getColumn(l)); // get l-th nn layer activations
@@ -88,7 +90,7 @@ public class BackPropagationLearningStra
deltas[i] = deltas[i].scalarMultiply(1 / trainingExamples.size());
}
- // now apply gradient descent (or other optimization/minimization algorithms) with this derivative terms and the LRCF
+ // TODO : now apply gradient descent (or other optimization/minimization algorithms) with these derivative terms and the cost function
return null;
}
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/FeedForwardStrategy.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/FeedForwardStrategy.java?rev=1420552&r1=1420551&r2=1420552&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/FeedForwardStrategy.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/FeedForwardStrategy.java Wed Dec 12 07:37:53 2012
@@ -62,7 +62,7 @@ public class FeedForwardStrategy impleme
}
private RealMatrix applyFF(Vector<Double> input, WeightsMatrix[] weightsMatrixSet) {
- // TODO : fix this impl as it's very slow and commons-math Java1.4 constraint is so ugly to see...
+ // TODO : fix this impl as it's very slow
RealVector v = ConversionUtils.toRealVector(input);
RealMatrix x = v.outerProduct(new ArrayRealVector(new Double[]{1d})).transpose(); // a 1xN matrix
for (WeightsMatrix weightsMatrix : weightsMatrixSet) {
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/SigmoidFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/SigmoidFunction.java?rev=1420552&r1=1420551&r2=1420552&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/SigmoidFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/SigmoidFunction.java Wed Dec 12 07:37:53 2012
@@ -24,7 +24,7 @@ package org.apache.yay;
public class SigmoidFunction implements ActivationFunction<Double> {
public Double apply(final Double input) {
- return 1d / (1d + Math.exp(-1 * input));
+ return 1d / (1d + Math.exp(-1d * input));
}
}
Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java?rev=1420552&r1=1420551&r2=1420552&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java Wed Dec 12 07:37:53 2012
@@ -76,6 +76,17 @@ public class NeuralNetworkFactoryTest {
assertEquals(1l, Math.round(norNN.predict(createSample(1d, 1d))));
}
+ @Test
+ public void sampleCreationTest() throws Exception {
+ WeightsMatrix firstLayer = new WeightsMatrix(new double[][]{{1d, 1d, 2d, 3d},{1d, 1d, 2d, 3d}, {1d, 1d, 2d, 3d}});
+ WeightsMatrix secondLayer = new WeightsMatrix(new double[][]{{1d, 2d, 3d}});
+ WeightsMatrix[] weightsMatrixes = new WeightsMatrix[]{firstLayer,secondLayer};
+ NeuralNetwork<Double,Double> neuralNetwork = createFFNN(weightsMatrixes);
+ Double predictedValue = neuralNetwork.predict(createSample(5d, 6d, 7d));
+ assertEquals(1l, Math.round(predictedValue));
+ assertEquals(Double.valueOf(0.9975273768433653d), predictedValue);
+ }
+
private NeuralNetwork<Double, Double> createFFNN(WeightsMatrix[] andWeightsMatrixSet)
throws CreationException {
return NeuralNetworkFactory.create(new LinkedList<TrainingExample<Double, Double>>(),
Added: labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java?rev=1420552&view=auto
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java (added)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java Wed Dec 12 07:37:53 2012
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.yay;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Testcase for {@link SigmoidFunction}
+ */
+public class SigmoidFunctionTest {
+
+ @Test
+ public void testCorrectOutput() throws Exception {
+ SigmoidFunction sigmoidFunction = new SigmoidFunction();
+ Double output = sigmoidFunction.apply(38d);
+ assertEquals(Double.valueOf(1d), output);
+
+ output = sigmoidFunction.apply(6d);
+ assertEquals(Double.valueOf(0.9975273768433653d), output);
+
+ output = sigmoidFunction.apply(2.5d);
+ assertEquals(Double.valueOf(0.9241418199787566d), output);
+
+ output = sigmoidFunction.apply(-2.5d);
+ assertEquals(Double.valueOf(0.07585818002124355d), output);
+
+ output = sigmoidFunction.apply(0d);
+ assertEquals(Double.valueOf(0.5d), output);
+ }
+}
Propchange: labs/yay/trunk/core/src/test/java/org/apache/yay/SigmoidFunctionTest.java
------------------------------------------------------------------------------
svn:eol-style = native
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@labs.apache.org
For additional commands, e-mail: commits-help@labs.apache.org