Posted to commits@labs.apache.org by to...@apache.org on 2013/03/06 13:06:06 UTC

svn commit: r1453308 - in /labs/yay/trunk/core/src: main/java/org/apache/yay/ main/java/org/apache/yay/utils/ test/java/org/apache/yay/

Author: tommaso
Date: Wed Mar  6 12:06:06 2013
New Revision: 1453308

URL: http://svn.apache.org/r1453308
Log:
fixed style

Modified:
    labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java
    labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java
    labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java
    labs/yay/trunk/core/src/main/java/org/apache/yay/NeuralNetworkFactory.java
    labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ConversionUtils.java
    labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java
    labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java
    labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java
    labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java
    labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/BackPropagationLearningStrategy.java Wed Mar  6 12:06:06 2013
@@ -32,81 +32,81 @@ import java.util.Collection;
  */
 public class BackPropagationLearningStrategy implements LearningStrategy<Double, Double[]> {
 
-    private final PredictionStrategy<Double, Double[]> predictionStrategy;
-    private CostFunction<RealMatrix, Double> costFunction;
+  private final PredictionStrategy<Double, Double[]> predictionStrategy;
+  private CostFunction<RealMatrix, Double> costFunction;
 
-    public BackPropagationLearningStrategy(PredictionStrategy<Double, Double[]> predictionStrategy, CostFunction<RealMatrix, Double> costFunction) {
-        this.predictionStrategy = predictionStrategy;
-        this.costFunction = costFunction;
-    }
-
-    @Override
-    public RealMatrix[] learnWeights(RealMatrix[] weightsMatrixSet, Collection<TrainingExample<Double, Double[]>> trainingExamples) throws WeightLearningException {
-        // set up the accumulator matrix(es)
-        RealMatrix[] triangle = new RealMatrix[weightsMatrixSet.length];
-        for (TrainingExample<Double, Double[]> trainingExample : trainingExamples) {
-            try {
-                // contains activation errors for the current training example
-                // TODO : check if this should be RealVector[] < probably yes
-                RealMatrix[] activationErrors = new RealMatrix[weightsMatrixSet.length - 1];
-
-                // feed forward propagation
-                RealMatrix[] activations = predictionStrategy.debugOutput(ConversionUtils.toValuesCollection(trainingExample.getFeatures()), weightsMatrixSet);
-
-                // calculate output error
-                RealVector error = calculateOutputError(trainingExample, activations);
-
-                activationErrors[activationErrors.length - 1] = new Array2DRowRealMatrix(error.toArray());
-
-                RealVector nextLayerDelta = new ArrayRealVector(error);
-
-                // back prop the error and update the activationErrors accordingly
-                // TODO : eventually remove the bias term from the error calculations
-                for (int l = weightsMatrixSet.length - 2; l >= 0; l--) {
-                    RealVector resultingDeltaVector = calculateDeltaVector(weightsMatrixSet[l], activations[l], nextLayerDelta);
-                    if (activationErrors[l] == null) {
-                        activationErrors[l] = new Array2DRowRealMatrix(new ArrayRealVector(resultingDeltaVector.getDimension(), 1d).toArray());
-                    }
-                    activationErrors[l] = new Array2DRowRealMatrix(resultingDeltaVector.toArray());
-                    nextLayerDelta = resultingDeltaVector;
-                }
-
-                // update the accumulator matrix
-                for (int l = 0; l < triangle.length - 1; l++) {
-                    if (triangle[l] == null) {
-                        triangle[l] = new Array2DRowRealMatrix(weightsMatrixSet[l].getRowDimension(), weightsMatrixSet[l].getColumnDimension());
-                    }
-                    triangle[l] = triangle[l].add(activationErrors[l + 1].getRowVector(0).outerProduct(activations[l].getRowVector(0)));
-                }
-
-            } catch (Exception e) {
-                throw new WeightLearningException("error during phase 1 of back-propagation algorithm", e);
-            }
-        }
-        for (int i = 0; i < triangle.length; i++) {
-            // TODO : introduce regularization diversification on bias term (currently not regularized)
-            triangle[i] = triangle[i].scalarMultiply(1 / trainingExamples.size());
+  public BackPropagationLearningStrategy(PredictionStrategy<Double, Double[]> predictionStrategy, CostFunction<RealMatrix, Double> costFunction) {
+    this.predictionStrategy = predictionStrategy;
+    this.costFunction = costFunction;
+  }
+
+  @Override
+  public RealMatrix[] learnWeights(RealMatrix[] weightsMatrixSet, Collection<TrainingExample<Double, Double[]>> trainingExamples) throws WeightLearningException {
+    // set up the accumulator matrix(es)
+    RealMatrix[] triangle = new RealMatrix[weightsMatrixSet.length];
+    for (TrainingExample<Double, Double[]> trainingExample : trainingExamples) {
+      try {
+        // contains activation errors for the current training example
+        // TODO : check if this should be RealVector[] < probably yes
+        RealMatrix[] activationErrors = new RealMatrix[weightsMatrixSet.length - 1];
+
+        // feed forward propagation
+        RealMatrix[] activations = predictionStrategy.debugOutput(ConversionUtils.toValuesCollection(trainingExample.getFeatures()), weightsMatrixSet);
+
+        // calculate output error
+        RealVector error = calculateOutputError(trainingExample, activations);
+
+        activationErrors[activationErrors.length - 1] = new Array2DRowRealMatrix(error.toArray());
+
+        RealVector nextLayerDelta = new ArrayRealVector(error);
+
+        // back prop the error and update the activationErrors accordingly
+        // TODO : eventually remove the bias term from the error calculations
+        for (int l = weightsMatrixSet.length - 2; l >= 0; l--) {
+          RealVector resultingDeltaVector = calculateDeltaVector(weightsMatrixSet[l], activations[l], nextLayerDelta);
+          if (activationErrors[l] == null) {
+            activationErrors[l] = new Array2DRowRealMatrix(new ArrayRealVector(resultingDeltaVector.getDimension(), 1d).toArray());
+          }
+          activationErrors[l] = new Array2DRowRealMatrix(resultingDeltaVector.toArray());
+          nextLayerDelta = resultingDeltaVector;
         }
 
-        // TODO : now apply gradient descent (or other optimization/minimization algorithms) with this derivative terms and the cost function
+        // update the accumulator matrix
+        for (int l = 0; l < triangle.length - 1; l++) {
+          if (triangle[l] == null) {
+            triangle[l] = new Array2DRowRealMatrix(weightsMatrixSet[l].getRowDimension(), weightsMatrixSet[l].getColumnDimension());
+          }
+          triangle[l] = triangle[l].add(activationErrors[l + 1].getRowVector(0).outerProduct(activations[l].getRowVector(0)));
+        }
 
-        return null;
+      } catch (Exception e) {
+        throw new WeightLearningException("error during phase 1 of back-propagation algorithm", e);
+      }
     }
-
-    private RealVector calculateDeltaVector(RealMatrix thetaL, RealMatrix activation, RealVector nextLayerDelta) {
-        ArrayRealVector activationsVector = new ArrayRealVector(activation.getRowVector(0)); // get l-th nn layer activations
-        ArrayRealVector identity = new ArrayRealVector(activationsVector.getDimension(), 1d);
-        RealVector gz = activationsVector.ebeMultiply(identity.subtract(activationsVector)); // = a^l .* (1-a^l)
-        return thetaL.transpose().preMultiply(nextLayerDelta).ebeMultiply(gz);
+    for (int i = 0; i < triangle.length; i++) {
+      // TODO : introduce regularization diversification on bias term (currently not regularized)
+      triangle[i] = triangle[i].scalarMultiply(1 / trainingExamples.size());
     }
 
-    private RealVector calculateOutputError(TrainingExample<Double, Double[]> trainingExample, RealMatrix[] activations) {
-        RealMatrix output = activations[activations.length - 1];
-        Double[] learnedOutput = trainingExample.getOutput(); // training example output
-        RealVector predictedOutputVector = new ArrayRealVector(output.getColumn(output.getColumnDimension() - 1)); // turn output to vector
-        RealVector learnedOutputRealVector = new ArrayRealVector(learnedOutput); // turn example output to a vector
+    // TODO : now apply gradient descent (or other optimization/minimization algorithms) with this derivative terms and the cost function
 
-        // TODO : improve error calculation > this should be er_a = out_a * (1 - out_a) * (tgt_a - out_a)
-        return predictedOutputVector.subtract(learnedOutputRealVector);
-    }
+    return null;
+  }
+
+  private RealVector calculateDeltaVector(RealMatrix thetaL, RealMatrix activation, RealVector nextLayerDelta) {
+    ArrayRealVector activationsVector = new ArrayRealVector(activation.getRowVector(0)); // get l-th nn layer activations
+    ArrayRealVector identity = new ArrayRealVector(activationsVector.getDimension(), 1d);
+    RealVector gz = activationsVector.ebeMultiply(identity.subtract(activationsVector)); // = a^l .* (1-a^l)
+    return thetaL.transpose().preMultiply(nextLayerDelta).ebeMultiply(gz);
+  }
+
+  private RealVector calculateOutputError(TrainingExample<Double, Double[]> trainingExample, RealMatrix[] activations) {
+    RealMatrix output = activations[activations.length - 1];
+    Double[] learnedOutput = trainingExample.getOutput(); // training example output
+    RealVector predictedOutputVector = new ArrayRealVector(output.getColumn(output.getColumnDimension() - 1)); // turn output to vector
+    RealVector learnedOutputRealVector = new ArrayRealVector(learnedOutput); // turn example output to a vector
+
+    // TODO : improve error calculation > this should be er_a = out_a * (1 - out_a) * (tgt_a - out_a)
+    return predictedOutputVector.subtract(learnedOutputRealVector);
+  }
 }
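
A note on the averaging step above: the expression 1 / trainingExamples.size() is evaluated in integer arithmetic, so it yields 0 whenever the training set holds more than one example, silently zeroing the accumulated gradients. A double literal avoids that:

    triangle[i] = triangle[i].scalarMultiply(1d / trainingExamples.size());

For reference, a minimal standalone sketch of the textbook delta rule, delta^l = (theta^l)^T * delta^(l+1) .* a^l .* (1 - a^l), which calculateDeltaVector above corresponds to (up to the orientation of the weights matrix); the dimensions below are hypothetical and only standard commons-math calls are used:

    import java.util.Arrays;
    import org.apache.commons.math3.linear.Array2DRowRealMatrix;
    import org.apache.commons.math3.linear.ArrayRealVector;
    import org.apache.commons.math3.linear.RealMatrix;
    import org.apache.commons.math3.linear.RealVector;

    public class DeltaRuleSketch {
      public static void main(String[] args) {
        // hypothetical 2x3 weights for layer l (2 next-layer units, 3 current-layer units)
        RealMatrix thetaL = new Array2DRowRealMatrix(new double[][]{{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}});
        RealVector activations = new ArrayRealVector(new double[]{0.9, 0.3, 0.7});   // a^l
        RealVector nextLayerDelta = new ArrayRealVector(new double[]{0.05, -0.02});  // delta^(l+1)

        // sigmoid gradient expressed through the activations: a^l .* (1 - a^l)
        RealVector ones = new ArrayRealVector(activations.getDimension(), 1d);
        RealVector gz = activations.ebeMultiply(ones.subtract(activations));

        // delta^l = (theta^l)^T * delta^(l+1) .* g'(z)
        RealVector deltaL = thetaL.transpose().operate(nextLayerDelta).ebeMultiply(gz);
        System.out.println(Arrays.toString(deltaL.toArray()));
      }
    }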

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java Wed Mar  6 12:06:06 2013
@@ -38,8 +38,7 @@ public class BasicPerceptron implements 
    * Create a perceptron given its input weights. Assume bias weight is given and all the input
    * samples have a corresponding 1 input for that
    *
-   * @param inputWeights
-   *          the array of starting weights for the perceptron
+   * @param inputWeights the array of starting weights for the perceptron
    */
   public BasicPerceptron(Double... inputWeights) {
     this.perceptronNeuron = new BinaryThresholdNeuron(0d, inputWeights);
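
Construction takes the weights as varargs, with the bias weight included among them (the javadoc above assumes each sample carries a matching constant-1 input). The NAND weights below are hypothetical, with the bias placed first, and the gate reading assumes the BinaryThresholdNeuron seen in the constructor fires on a non-negative weighted sum:

    // bias weight 3, input weights -2 and -2: the weighted sum 3 - 2*x1 - 2*x2
    // drops below zero only when both inputs are 1, i.e. a NAND gate
    BasicPerceptron nand = new BasicPerceptron(3d, -2d, -2d);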

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java Wed Mar  6 12:06:06 2013
@@ -35,7 +35,7 @@ public class LogisticRegressionCostFunct
 
   @Override
   public Double calculateCost(Collection<TrainingExample<Double, Double>> trainingExamples,
-          ActivationFunction<Double> hypothesis, RealMatrix... parameters) throws Exception {
+                              ActivationFunction<Double> hypothesis, RealMatrix... parameters) throws Exception {
 
     Double errorTerm = calculateErrorTerm(parameters, hypothesis, trainingExamples);
     Double regularizationTerm = calculateRegularizationTerm(parameters, trainingExamples);
@@ -43,7 +43,7 @@ public class LogisticRegressionCostFunct
   }
 
   private Double calculateRegularizationTerm(RealMatrix[] parameters,
-          Collection<TrainingExample<Double, Double>> trainingExamples) {
+                                             Collection<TrainingExample<Double, Double>> trainingExamples) {
     Double res = 1d;
     for (RealMatrix layerMatrix : parameters) {
       for (int i = 0; i < layerMatrix.getColumnDimension(); i++) {
@@ -58,13 +58,13 @@ public class LogisticRegressionCostFunct
   }
 
   private Double calculateErrorTerm(RealMatrix[] parameters,
-          ActivationFunction<Double> hypothesis,
-          Collection<TrainingExample<Double, Double>> trainingExamples) throws PredictionException,
+                                    ActivationFunction<Double> hypothesis,
+                                    Collection<TrainingExample<Double, Double>> trainingExamples) throws PredictionException,
           CreationException {
     Double res = 0d;
     NeuralNetwork<Double, Double> neuralNetwork = NeuralNetworkFactory.create(trainingExamples,
             parameters, new VoidLearningStrategy<Double, Double>(), new FeedForwardStrategy(
-                    hypothesis));
+            hypothesis));
 
     for (TrainingExample<Double, Double> input : trainingExamples) {
       // TODO : handle this for multiple outputs (multi class classification)
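
For orientation, the quantity calculateCost assembles from the two helpers above is, in the usual regularized logistic-regression form (this reading is an assumption; m is the number of training examples and lambda the regularization weight):

    J(theta) = -(1/m) * sum_i [ y_i * log(h_theta(x_i)) + (1 - y_i) * log(1 - h_theta(x_i)) ]
               + (lambda / (2m)) * sum_{l,i,j} (theta_ij^(l))^2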

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/NeuralNetworkFactory.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/NeuralNetworkFactory.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/NeuralNetworkFactory.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/NeuralNetworkFactory.java Wed Mar  6 12:06:06 2013
@@ -33,7 +33,7 @@ public class NeuralNetworkFactory {
    * by a set of matrices, the learning and prediction strategies to be used.
    *
    * @param trainingExamples   the training set
-   * @param RealMatrixSet   the initial settings for weights matrices
+   * @param RealMatrixSet      the initial settings for weights matrices
    * @param learningStrategy   a learning strategy
    * @param predictionStrategy a prediction strategy
    * @return a NeuralNetwork instance
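
As exercised by NeuralNetworkFactoryTest later in this commit, a feed-forward network is assembled like so (weightsMatrixSet being any RealMatrix[] of layer weights):

    NeuralNetwork<Double, Double> nn = NeuralNetworkFactory.create(
            new LinkedList<TrainingExample<Double, Double>>(),
            weightsMatrixSet,
            new VoidLearningStrategy<Double, Double>(),
            new FeedForwardStrategy(new SigmoidFunction()));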

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ConversionUtils.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ConversionUtils.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ConversionUtils.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ConversionUtils.java Wed Mar  6 12:06:06 2013
@@ -18,10 +18,6 @@
  */
 package org.apache.yay.utils;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Vector;
-
 import org.apache.commons.math3.linear.Array2DRowRealMatrix;
 import org.apache.commons.math3.linear.OpenMapRealVector;
 import org.apache.commons.math3.linear.RealMatrix;
@@ -29,6 +25,9 @@ import org.apache.commons.math3.linear.R
 import org.apache.yay.Example;
 import org.apache.yay.Feature;
 
+import java.util.ArrayList;
+import java.util.Collection;
+
 /**
  * Temporary class for conversion between model objects and commons-math matrices/vectors
  */
@@ -36,6 +35,7 @@ public class ConversionUtils {
 
   /**
    * Converts a set of examples to a matrix of inputs with features
+   *
    * @param trainingSet samples with features of type Double
    * @return a real matrix
    */
@@ -54,6 +54,7 @@ public class ConversionUtils {
 
   /**
    * converts an example with Double features to a double array
+   *
    * @param sample the sample to convert
    * @return a double array
    */
@@ -69,6 +70,7 @@ public class ConversionUtils {
 
   /**
    * converts a vector of doubles to a real vector
+   *
    * @param input a vector of Double objects
    * @return a real vector
    */
@@ -79,6 +81,7 @@ public class ConversionUtils {
   /**
    * turns a collection of features of type <code>T</code> into a collection of
    * <code>T</code> objects.
+   *
    * @param featureVector the vector of features
    * @return a vector of Doubles
    */
@@ -86,13 +89,14 @@ public class ConversionUtils {
     // TODO : remove this and change APIs in a way that doesn't force to go through this ugly loop
     Collection<T> resultVector = new ArrayList<T>();
     for (Feature<T> feature : featureVector) {
-        resultVector.add(feature.getValue());
+      resultVector.add(feature.getValue());
     }
     return resultVector;
   }
 
   /**
    * this is just nice! :-) (thanks commons-math)
+   *
    * @param ar a double array
    * @return a Double array
    */
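
Taken together with BackPropagationLearningStrategy above, the typical call site for toValuesCollection is unwrapping an example's features into their raw values:

    // Feature<Double> objects in, plain Doubles out
    Collection<Double> values = ConversionUtils.toValuesCollection(example.getFeatures());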

Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java Wed Mar  6 12:06:06 2013
@@ -18,20 +18,19 @@
  */
 package org.apache.yay.utils;
 
-import java.util.ArrayList;
-import java.util.Vector;
-
 import org.apache.yay.Example;
 import org.apache.yay.Feature;
 import org.apache.yay.TrainingExample;
 
+import java.util.ArrayList;
+
 /**
  * Factory class for {@link Example}s
  */
 public class ExamplesFactory {
 
   public static TrainingExample<Double, Double> createDoubleTrainingExample(final Double output,
-          final Double... featuresValues) {
+                                                                            final Double... featuresValues) {
     return new TrainingExample<Double, Double>() {
       @Override
       public ArrayList<Feature<Double>> getFeatures() {
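
Usage is a one-liner; the sample below is hypothetical and mirrors the OR-gate fixtures in the tests (output first, then the feature values):

    TrainingExample<Double, Double> example =
            ExamplesFactory.createDoubleTrainingExample(1d, 1d, 0d);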

Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java Wed Mar  6 12:06:06 2013
@@ -50,7 +50,7 @@ public class BasicPerceptronTest {
   }
 
   private TrainingExample<Double, Double> createTrainingExample(final Double output,
-          final Double... params) {
+                                                                final Double... params) {
     return new TrainingExample<Double, Double>() {
       @Override
       public ArrayList<Feature<Double>> getFeatures() {

Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java Wed Mar  6 12:06:06 2013
@@ -45,9 +45,9 @@ public class LogisticRegressionCostFunct
     trainingExamples.add(example2);
     trainingExamples.add(example3);
     trainingExamples.add(example4);
-    double[][] weights = { { -10d, 20d, 20d } };
+    double[][] weights = {{-10d, 20d, 20d}};
     RealMatrix singleOrLayerWeights = new Array2DRowRealMatrix(weights);
-    RealMatrix[] orWeightsMatrixSet = new RealMatrix[] { singleOrLayerWeights };
+    RealMatrix[] orWeightsMatrixSet = new RealMatrix[]{singleOrLayerWeights};
     Double cost = costFunction.calculateCost(trainingExamples, new SigmoidFunction(),
             orWeightsMatrixSet);
     assertTrue("cost should not be negative", cost > 0d);
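
The {-10d, 20d, 20d} row encodes an OR gate under the sigmoid hypothesis: with a leading bias input of 1, the weighted sums for inputs (0,0), (1,0), (0,1) and (1,1) are -10, 10, 10 and 30, and since sigmoid(-10) ~ 4.5e-5 while sigmoid(10) ~ 0.99995, the rounded outputs reproduce the OR truth table, so a small positive cost is the expected result here.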

Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/NeuralNetworkFactoryTest.java Wed Mar  6 12:06:06 2013
@@ -37,7 +37,7 @@ public class NeuralNetworkFactoryTest {
     double[][] weights = {{-30d, 20d, 20d}};
     RealMatrix singleAndLayerWeights = new Array2DRowRealMatrix(weights);
     RealMatrix[] andRealMatrixSet = new RealMatrix[]{singleAndLayerWeights};
-    NeuralNetwork<Double,Double> andNN = createFFNN(andRealMatrixSet);
+    NeuralNetwork<Double, Double> andNN = createFFNN(andRealMatrixSet);
     assertEquals(0l, Math.round(andNN.predict(createSample(1d, 0d))));
     assertEquals(0l, Math.round(andNN.predict(createSample(0d, 1d))));
     assertEquals(0l, Math.round(andNN.predict(createSample(0d, 0d))));
@@ -49,7 +49,7 @@ public class NeuralNetworkFactoryTest {
     double[][] weights = {{-10d, 20d, 20d}};
     RealMatrix singleOrLayerWeights = new Array2DRowRealMatrix(weights);
     RealMatrix[] orRealMatrixSet = new RealMatrix[]{singleOrLayerWeights};
-    NeuralNetwork<Double,Double> orNN = createFFNN(orRealMatrixSet);
+    NeuralNetwork<Double, Double> orNN = createFFNN(orRealMatrixSet);
     assertEquals(1l, Math.round(orNN.predict(createSample(1d, 0d))));
     assertEquals(1l, Math.round(orNN.predict(createSample(0d, 1d))));
     assertEquals(0l, Math.round(orNN.predict(createSample(0d, 0d))));
@@ -61,17 +61,17 @@ public class NeuralNetworkFactoryTest {
     double[][] weights = {{10d, -20d}};
     RealMatrix singleNotLayerWeights = new Array2DRowRealMatrix(weights);
     RealMatrix[] notRealMatrixSet = new RealMatrix[]{singleNotLayerWeights};
-    NeuralNetwork<Double,Double> orNN = createFFNN(notRealMatrixSet);
+    NeuralNetwork<Double, Double> orNN = createFFNN(notRealMatrixSet);
     assertEquals(1l, Math.round(orNN.predict(createSample(0d))));
     assertEquals(0l, Math.round(orNN.predict(createSample(1d))));
   }
 
   @Test
   public void norNNCreationTest() throws Exception {
-    RealMatrix firstNorLayerWeights = new Array2DRowRealMatrix(new double[][]{{0, 0, 0},{-30d, 20d, 20d}, {10d, -20d, -20d}});
+    RealMatrix firstNorLayerWeights = new Array2DRowRealMatrix(new double[][]{{0, 0, 0}, {-30d, 20d, 20d}, {10d, -20d, -20d}});
     RealMatrix secondNorLayerWeights = new Array2DRowRealMatrix(new double[][]{{-10d, 20d, 20d}});
-    RealMatrix[] norRealMatrixSet = new RealMatrix[]{firstNorLayerWeights,secondNorLayerWeights};
-    NeuralNetwork<Double,Double> norNN = createFFNN(norRealMatrixSet);
+    RealMatrix[] norRealMatrixSet = new RealMatrix[]{firstNorLayerWeights, secondNorLayerWeights};
+    NeuralNetwork<Double, Double> norNN = createFFNN(norRealMatrixSet);
     assertEquals(0l, Math.round(norNN.predict(createSample(1d, 0d))));
     assertEquals(0l, Math.round(norNN.predict(createSample(0d, 1d))));
     assertEquals(1l, Math.round(norNN.predict(createSample(0d, 0d))));
@@ -80,10 +80,10 @@ public class NeuralNetworkFactoryTest {
 
   @Test
   public void sampleCreationTest() throws Exception {
-    RealMatrix firstLayer = new Array2DRowRealMatrix(new double[][]{{1d, 1d, 2d, 3d},{1d, 1d, 2d, 3d}, {1d, 1d, 2d, 3d}});
+    RealMatrix firstLayer = new Array2DRowRealMatrix(new double[][]{{1d, 1d, 2d, 3d}, {1d, 1d, 2d, 3d}, {1d, 1d, 2d, 3d}});
     RealMatrix secondLayer = new Array2DRowRealMatrix(new double[][]{{1d, 2d, 3d}});
-    RealMatrix[] RealMatrixes = new RealMatrix[]{firstLayer,secondLayer};
-    NeuralNetwork<Double,Double> neuralNetwork = createFFNN(RealMatrixes);
+    RealMatrix[] RealMatrixes = new RealMatrix[]{firstLayer, secondLayer};
+    NeuralNetwork<Double, Double> neuralNetwork = createFFNN(RealMatrixes);
     Double prdictedValue = neuralNetwork.predict(createSample(5d, 6d, 7d));
     assertEquals(1l, Math.round(prdictedValue));
     assertEquals(Double.valueOf(0.9975273768433653d), prdictedValue);
@@ -91,8 +91,8 @@ public class NeuralNetworkFactoryTest {
 
   private NeuralNetwork<Double, Double> createFFNN(RealMatrix[] andRealMatrixSet)
           throws CreationException {
-    return NeuralNetworkFactory.create(new LinkedList<TrainingExample<Double, Double>>(), 
-            andRealMatrixSet, new VoidLearningStrategy<Double, Double>(), 
+    return NeuralNetworkFactory.create(new LinkedList<TrainingExample<Double, Double>>(),
+            andRealMatrixSet, new VoidLearningStrategy<Double, Double>(),
             new FeedForwardStrategy(new SigmoidFunction()));
   }
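
A standalone check of the single-layer gates these tests assert, using commons-math's own Sigmoid in place of yay's SigmoidFunction (the FeedForwardStrategy internals are not part of this diff, so this only mirrors what the truth-table assertions imply):

    import org.apache.commons.math3.analysis.function.Sigmoid;
    import org.apache.commons.math3.linear.Array2DRowRealMatrix;
    import org.apache.commons.math3.linear.ArrayRealVector;
    import org.apache.commons.math3.linear.RealMatrix;
    import org.apache.commons.math3.linear.RealVector;

    public class AndGateSketch {
      public static void main(String[] args) {
        // the AND weights from andNNCreationTest: bias -30, inputs 20 and 20
        RealMatrix weights = new Array2DRowRealMatrix(new double[][]{{-30d, 20d, 20d}});
        Sigmoid sigmoid = new Sigmoid();
        double[][] samples = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
        for (double[] s : samples) {
          // prepend the constant bias input, then squash the weighted sum
          RealVector input = new ArrayRealVector(new double[]{1d, s[0], s[1]});
          double out = sigmoid.value(weights.operate(input).getEntry(0));
          System.out.println((int) s[0] + " AND " + (int) s[1] + " -> " + Math.round(out));
        }
      }
    }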
 

Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java?rev=1453308&r1=1453307&r2=1453308&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java Wed Mar  6 12:06:06 2013
@@ -39,7 +39,7 @@ public class VoidLearningStrategyTest {
     VoidLearningStrategy<String, String> learningStrategy = new VoidLearningStrategy<String, String>();
     Collection<TrainingExample<String, String>> trainingExamples = new LinkedList<TrainingExample<String, String>>();
     RealMatrix[] weightsMatrixSet = new RealMatrix[1];
-    double[][] weights = { { 1d, 2d, }, { 2d, 4d } };
+    double[][] weights = {{1d, 2d,}, {2d, 4d}};
     weightsMatrixSet[0] = new Array2DRowRealMatrix(weights);
     RealMatrix[] learnedWeights = learningStrategy.learnWeights(weightsMatrixSet,
             trainingExamples);


