Posted to commits@hama.apache.org by ed...@apache.org on 2015/11/23 03:26:47 UTC

[1/5] hama git commit: HAMA-961: Remove ann package

Repository: hama
Updated Branches:
  refs/heads/master 0225205a9 -> 3a3ea7a37


http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMLPMessage.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMLPMessage.java b/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMLPMessage.java
deleted file mode 100644
index ba2b8c4..0000000
--- a/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMLPMessage.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.junit.Test;
-
-/**
- * Test the functionalities of SmallMLPMessage
- * 
- */
-public class TestSmallMLPMessage {
-
-  @Test
-  public void testReadWriteWithoutPrevUpdate() {
-    int owner = 101;
-    double[][] mat = { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
-    double[][] mat2 = { { 10, 20 }, { 30, 40 }, { 50, 60 } };
-    double[][][] mats = { mat, mat2 };
-
-    DenseDoubleMatrix[] matrices = new DenseDoubleMatrix[] {
-        new DenseDoubleMatrix(mat), new DenseDoubleMatrix(mat2) };
-
-    SmallMLPMessage message = new SmallMLPMessage(owner, true, matrices);
-
-    Configuration conf = new Configuration();
-    String strPath = "/tmp/testSmallMLPMessage";
-    Path path = new Path(strPath);
-    try {
-      FileSystem fs = FileSystem.get(new URI(strPath), conf);
-      FSDataOutputStream out = fs.create(path, true);
-      message.write(out);
-      out.close();
-
-      FSDataInputStream in = fs.open(path);
-      SmallMLPMessage outMessage = new SmallMLPMessage(0, false, null);
-      outMessage.readFields(in);
-
-      assertEquals(owner, outMessage.getOwner());
-      DenseDoubleMatrix[] outMatrices = outMessage.getWeightUpdatedMatrices();
-      // check each matrix
-      for (int i = 0; i < outMatrices.length; ++i) {
-        double[][] outMat = outMatrices[i].getValues();
-        for (int j = 0; j < outMat.length; ++j) {
-          assertArrayEquals(mats[i][j], outMat[j], 0.0001);
-        }
-      }
-
-      fs.delete(path, true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Test
-  public void testReadWriteWithPrevUpdate() {
-    int owner = 101;
-    double[][] mat = { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
-    double[][] mat2 = { { 10, 20 }, { 30, 40 }, { 50, 60 } };
-    double[][][] mats = { mat, mat2 };
-
-    double[][] prevMat = { { 0.1, 0.2, 0.3 }, { 0.4, 0.5, 0.6 },
-        { 0.7, 0.8, 0.9 } };
-    double[][] prevMat2 = { { 1, 2 }, { 3, 4 }, { 5, 6 } };
-    double[][][] prevMats = { prevMat, prevMat2 };
-
-    DenseDoubleMatrix[] matrices = new DenseDoubleMatrix[] {
-        new DenseDoubleMatrix(mat), new DenseDoubleMatrix(mat2) };
-
-    DenseDoubleMatrix[] prevMatrices = new DenseDoubleMatrix[] {
-        new DenseDoubleMatrix(prevMat), new DenseDoubleMatrix(prevMat2) };
-
-    boolean terminated = false;
-    SmallMLPMessage message = new SmallMLPMessage(owner, terminated, matrices,
-        prevMatrices);
-
-    Configuration conf = new Configuration();
-    String strPath = "/tmp/testSmallMLPMessageWithPrevMatrices";
-    Path path = new Path(strPath);
-    try {
-      FileSystem fs = FileSystem.get(new URI(strPath), conf);
-      FSDataOutputStream out = fs.create(path, true);
-      message.write(out);
-      out.close();
-
-      FSDataInputStream in = fs.open(path);
-      SmallMLPMessage outMessage = new SmallMLPMessage(0, false, null);
-      outMessage.readFields(in);
-
-      assertEquals(owner, outMessage.getOwner());
-      assertEquals(terminated, outMessage.isTerminated());
-      DenseDoubleMatrix[] outMatrices = outMessage.getWeightUpdatedMatrices();
-      // check each matrix
-      for (int i = 0; i < outMatrices.length; ++i) {
-        double[][] outMat = outMatrices[i].getValues();
-        for (int j = 0; j < outMat.length; ++j) {
-          assertArrayEquals(mats[i][j], outMat[j], 0.0001);
-        }
-      }
-
-      DenseDoubleMatrix[] outPrevMatrices = outMessage
-          .getPrevWeightsUpdatedMatrices();
-      // check each matrix
-      for (int i = 0; i < outPrevMatrices.length; ++i) {
-        double[][] outMat = outPrevMatrices[i].getValues();
-        for (int j = 0; j < outMat.length; ++j) {
-          assertArrayEquals(prevMats[i][j], outMat[j], 0.0001);
-        }
-      }
-
-      fs.delete(path, true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-  }
-}
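
For reference, the round-trip these tests exercise is the standard Hadoop Writable cycle: write() serializes the message, readFields() restores it into a fresh instance. A minimal in-memory sketch of the same cycle, assuming only the org.apache.hama.ml message shown above implements the org.apache.hadoop.io.Writable contract (the class and helper names here are illustrative, not part of the codebase):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.Writable;

    public final class WritableRoundTrip {
      /** Serialize source to bytes, then populate target from those bytes. */
      public static <T extends Writable> T roundTrip(T source, T target)
          throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        source.write(out);      // same call as message.write(out) above
        out.close();
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(buffer.toByteArray()));
        target.readFields(in);  // same call as outMessage.readFields(in)
        in.close();
        return target;
      }
    }

This avoids touching the filesystem, which the deleted tests only need because they exercise FSDataOutputStream/FSDataInputStream as the DataOutput/DataInput implementations.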

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMultiLayerPerceptron.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMultiLayerPerceptron.java b/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMultiLayerPerceptron.java
deleted file mode 100644
index 02fa2da..0000000
--- a/ml/src/test/java/org/apache/hama/ml/perception/TestSmallMultiLayerPerceptron.java
+++ /dev/null
@@ -1,524 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.commons.lang.SerializationUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hama.commons.io.MatrixWritable;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.ml.util.DefaultFeatureTransformer;
-import org.apache.hama.ml.util.FeatureTransformer;
-import org.junit.Test;
-import org.mortbay.log.Log;
-
-public class TestSmallMultiLayerPerceptron {
-
-  /**
-   * Write and read the parameters of MLP.
-   */
-  @Test
-  public void testWriteReadMLP() {
-    String modelPath = "/tmp/sampleModel-testWriteReadMLP.data";
-    double learningRate = 0.3;
-    double regularization = 0.0; // no regularization
-    double momentum = 0; // no momentum
-    String squashingFunctionName = "Sigmoid";
-    String costFunctionName = "SquaredError";
-    int[] layerSizeArray = new int[] { 3, 2, 2, 3 };
-    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-    FeatureTransformer transformer = new DefaultFeatureTransformer();
-    mlp.setFeatureTransformer(transformer);
-    try {
-      mlp.writeModelToFile(modelPath);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    try {
-      // read the meta-data
-      Configuration conf = new Configuration();
-      FileSystem fs = FileSystem.get(conf);
-      mlp = new SmallMultiLayerPerceptron(modelPath);
-      assertEquals(mlp.getClass().getName(), mlp.getMLPType());
-      assertEquals(learningRate, mlp.getLearningRate(), 0.001);
-      assertEquals(regularization, mlp.isRegularization(), 0.001);
-      assertEquals(layerSizeArray.length, mlp.getNumberOfLayers());
-      assertEquals(momentum, mlp.getMomentum(), 0.001);
-      assertEquals(squashingFunctionName, mlp.getSquashingFunctionName());
-      assertEquals(costFunctionName, mlp.getCostFunctionName());
-      assertArrayEquals(layerSizeArray, mlp.getLayerSizeArray());
-      assertEquals(transformer.getClass().getName(), mlp.getFeatureTransformer().getClass().getName());
-      // delete test file
-      fs.delete(new Path(modelPath), true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Test the output of an example MLP.
-   */
-  @Test
-  public void testOutput() {
-    // write the MLP meta-data manually
-    String modelPath = "/tmp/sampleModel-testOutput.data";
-    Configuration conf = new Configuration();
-    try {
-      FileSystem fs = FileSystem.get(conf);
-      FSDataOutputStream output = fs.create(new Path(modelPath), true);
-
-      String MLPType = SmallMultiLayerPerceptron.class.getName();
-      double learningRate = 0.5;
-      double regularization = 0.0;
-      double momentum = 0.1;
-      String squashingFunctionName = "Sigmoid";
-      String costFunctionName = "SquaredError";
-      int[] layerSizeArray = new int[] { 3, 2, 3, 3 };
-      int numberOfLayers = layerSizeArray.length;
-
-      WritableUtils.writeString(output, MLPType);
-      output.writeDouble(learningRate);
-      output.writeDouble(regularization);
-      output.writeDouble(momentum);
-      output.writeInt(numberOfLayers);
-      WritableUtils.writeString(output, squashingFunctionName);
-      WritableUtils.writeString(output, costFunctionName);
-
-      // write the number of neurons for each layer
-      for (int i = 0; i < numberOfLayers; ++i) {
-        output.writeInt(layerSizeArray[i]);
-      }
-
-      double[][] matrix01 = { // 4 by 2
-      { 0.5, 0.2 }, { 0.1, 0.1 }, { 0.2, 0.5 }, { 0.1, 0.5 } };
-
-      double[][] matrix12 = { // 3 by 3
-      { 0.1, 0.2, 0.5 }, { 0.2, 0.5, 0.2 }, { 0.5, 0.5, 0.1 } };
-
-      double[][] matrix23 = { // 4 by 3
-      { 0.2, 0.5, 0.2 }, { 0.5, 0.1, 0.5 }, { 0.1, 0.2, 0.1 },
-          { 0.1, 0.2, 0.5 } };
-
-      DoubleMatrix[] matrices = { new DenseDoubleMatrix(matrix01),
-          new DenseDoubleMatrix(matrix12), new DenseDoubleMatrix(matrix23) };
-      for (DoubleMatrix mat : matrices) {
-        MatrixWritable.write(mat, output);
-      }
-
-      // serialize the feature transformer
-      FeatureTransformer transformer = new DefaultFeatureTransformer();
-      Class<? extends FeatureTransformer> featureTransformerCls = transformer.getClass();
-      byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
-      output.writeInt(featureTransformerBytes.length);
-      output.write(featureTransformerBytes);
-      
-      output.close();
-
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    // initialize the MLP with existing model meta-data and get the output
-    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
-    DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
-    try {
-      DoubleVector result = mlp.output(input);
-      assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 },
-          result.toArray(), 0.0001);
-    } catch (Exception e1) {
-      e1.printStackTrace();
-    }
-
-    // delete meta-data
-    try {
-      FileSystem fs = FileSystem.get(conf);
-      fs.delete(new Path(modelPath), true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-  }
-
-  /**
-   * Test training with squared error on the XOR problem.
-   */
-  @Test
-  public void testTrainWithSquaredError() {
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-
-    // set parameters
-    double learningRate = 0.3;
-    double regularization = 0.02; // small regularization weight
-    double momentum = 0; // no momentum
-    String squashingFunctionName = "Sigmoid";
-    String costFunctionName = "SquaredError";
-    int[] layerSizeArray = new int[] { 2, 5, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-
-    try {
-      // train by multiple instances
-      Random rnd = new Random();
-      for (int i = 0; i < 100000; ++i) {
-        DenseDoubleMatrix[] weightUpdates = mlp
-            .trainByInstance(trainingData[rnd.nextInt(4)]);
-        mlp.updateWeightMatrices(weightUpdates);
-      }
-
-      // System.out.printf("Weight matrices: %s\n",
-      // mlp.weightsToString(mlp.getWeightMatrices()));
-      for (int i = 0; i < trainingData.length; ++i) {
-        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
-            .slice(2);
-        double expected = trainingData[i].toArray()[2];
-        double actual = mlp.output(testVec).toArray()[0];
-        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
-          Log.info("Neural network failes to lear the XOR.");
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Test training with cross entropy on the XOR problem.
-   */
-  @Test
-  public void testTrainWithCrossEntropy() {
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-
-    // set parameters
-    double learningRate = 0.3;
-    double regularization = 0.0; // no regularization
-    double momentum = 0; // no momentum
-    String squashingFunctionName = "Sigmoid";
-    String costFunctionName = "CrossEntropy";
-    int[] layerSizeArray = new int[] { 2, 7, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-
-    try {
-      // train by multiple instances
-      Random rnd = new Random();
-      for (int i = 0; i < 50000; ++i) {
-        DenseDoubleMatrix[] weightUpdates = mlp
-            .trainByInstance(trainingData[rnd.nextInt(4)]);
-        mlp.updateWeightMatrices(weightUpdates);
-      }
-
-      // System.out.printf("Weight matrices: %s\n",
-      // mlp.weightsToString(mlp.getWeightMatrices()));
-      for (int i = 0; i < trainingData.length; ++i) {
-        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
-            .slice(2);
-        double expected = trainingData[i].toArray()[2];
-        double actual = mlp.output(testVec).toArray()[0];
-        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
-          Log.info("Neural network failes to lear the XOR.");
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Test training with regularization.
-   */
-  @Test
-  public void testWithRegularization() {
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-
-    // set parameters
-    double learningRate = 0.3;
-    double regularization = 0.02; // regularization should be a tiny number
-    double momentum = 0; // no momentum
-    String squashingFunctionName = "Sigmoid";
-    String costFunctionName = "CrossEntropy";
-    int[] layerSizeArray = new int[] { 2, 7, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-
-    try {
-      // train by multiple instances
-      Random rnd = new Random();
-      for (int i = 0; i < 20000; ++i) {
-        DenseDoubleMatrix[] weightUpdates = mlp
-            .trainByInstance(trainingData[rnd.nextInt(4)]);
-        mlp.updateWeightMatrices(weightUpdates);
-      }
-
-      // System.out.printf("Weight matrices: %s\n",
-      // mlp.weightsToString(mlp.getWeightMatrices()));
-      for (int i = 0; i < trainingData.length; ++i) {
-        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
-            .slice(2);
-        double expected = trainingData[i].toArray()[2];
-        double actual = mlp.output(testVec).toArray()[0];
-        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
-          Log.info("Neural network failes to lear the XOR.");
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Test training with momentum. The MLP can converge faster.
-   */
-  @Test
-  public void testWithMomentum() {
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-
-    // set parameters
-    double learningRate = 0.3;
-    double regularization = 0.02; // regularization should be a tiny number
-    double momentum = 0.5; // with momentum
-    String squashingFunctionName = "Sigmoid";
-    String costFunctionName = "CrossEntropy";
-    int[] layerSizeArray = new int[] { 2, 7, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-
-    try {
-      // train by multiple instances
-      Random rnd = new Random();
-      for (int i = 0; i < 5000; ++i) {
-        DenseDoubleMatrix[] weightUpdates = mlp
-            .trainByInstance(trainingData[rnd.nextInt(4)]);
-        mlp.updateWeightMatrices(weightUpdates);
-      }
-
-      // System.out.printf("Weight matrices: %s\n",
-      // mlp.weightsToString(mlp.getWeightMatrices()));
-      for (int i = 0; i < trainingData.length; ++i) {
-        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
-            .slice(2);
-        double expected = trainingData[i].toArray()[2];
-        double actual = mlp.output(testVec).toArray()[0];
-        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
-          Log.info("Neural network failes to lear the XOR.");
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-  
-  @Test
-  public void testByRunningJobs() {
-    this.testTrainingByXOR();
-    this.testFeatureTransformer();
-  }
-
-  /**
-   * Test the XOR problem.
-   */
-  public void testTrainingByXOR() {
-    // write in some training instances
-    Configuration conf = new Configuration();
-    String strDataPath = "/tmp/xor-training-by-xor";
-    Path dataPath = new Path(strDataPath);
-
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-
-    try {
-      URI uri = new URI(strDataPath);
-      FileSystem fs = FileSystem.get(uri, conf);
-      fs.delete(dataPath, true);
-      if (!fs.exists(dataPath)) {
-        fs.createNewFile(dataPath);
-        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
-            dataPath, LongWritable.class, VectorWritable.class);
-
-        for (int i = 0; i < 1000; ++i) {
-          VectorWritable vecWritable = new VectorWritable(trainingData[i % 4]);
-          writer.append(new LongWritable(i), vecWritable);
-        }
-        writer.close();
-      }
-
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-
-    // begin training
-    String modelPath = "/tmp/xorModel-training-by-xor.data";
-    double learningRate = 0.6;
-    double regularization = 0.02; // small regularization weight
-    double momentum = 0.3; // with momentum
-    String squashingFunctionName = "Tanh";
-    String costFunctionName = "SquaredError";
-    int[] layerSizeArray = new int[] { 2, 5, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-
-    Map<String, String> trainingParams = new HashMap<String, String>();
-    trainingParams.put("training.iteration", "2000");
-    trainingParams.put("training.mode", "minibatch.gradient.descent");
-    trainingParams.put("training.batch.size", "100");
-    trainingParams.put("tasks", "3");
-    trainingParams.put("modelPath", modelPath);
-
-    try {
-      mlp.train(dataPath, trainingParams);
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-
-    // test the model
-    for (int i = 0; i < trainingData.length; ++i) {
-      DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i].slice(2);
-      try {
-        double expected = trainingData[i].toArray()[2];
-        double actual = mlp.output(testVec).toArray()[0];
-        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
-          Log.info("Neural network failes to lear the XOR.");
-        }
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-  }
-  
-  /**
-   * Use a transformer to extract the first half of the original features.
-   */
-  public void testFeatureTransformer() {
-    // write in some training instances
-    Configuration conf = new Configuration();
-    String strDataPath = "/tmp/xor-training-by-xor";
-    Path dataPath = new Path(strDataPath);
-
-    // generate training data
-    DoubleVector[] trainingData = new DenseDoubleVector[] {
-        new DenseDoubleVector(new double[] { 0, 0, 0 }),
-        new DenseDoubleVector(new double[] { 0, 1, 1 }),
-        new DenseDoubleVector(new double[] { 1, 0, 1 }),
-        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
-    
-    try {
-      URI uri = new URI(strDataPath);
-      FileSystem fs = FileSystem.get(uri, conf);
-      fs.delete(dataPath, true);
-      if (!fs.exists(dataPath)) {
-        fs.createNewFile(dataPath);
-        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
-            dataPath, LongWritable.class, VectorWritable.class);
-
-        for (int i = 0; i < 1000; ++i) {
-          VectorWritable vecWritable = new VectorWritable(trainingData[i % 4]);
-          writer.append(new LongWritable(i), vecWritable);
-        }
-        writer.close();
-      }
-
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-
-    // begin training
-    String modelPath = "/tmp/xorModel-training-by-xor.data";
-    double learningRate = 0.6;
-    double regularization = 0.02; // small regularization weight
-    double momentum = 0.3; // with momentum
-    String squashingFunctionName = "Tanh";
-    String costFunctionName = "SquaredError";
-    int[] layerSizeArray = new int[] { 1, 5, 1 };
-    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
-        regularization, momentum, squashingFunctionName, costFunctionName,
-        layerSizeArray);
-    
-    mlp.setFeatureTransformer(new FeatureTransformer() {
-
-      @Override
-      public DoubleVector transform(DoubleVector originalFeatures) {
-        return originalFeatures.sliceUnsafe(originalFeatures.getDimension() / 2);
-      }
-      
-    });
-
-    Map<String, String> trainingParams = new HashMap<String, String>();
-    trainingParams.put("training.iteration", "2000");
-    trainingParams.put("training.mode", "minibatch.gradient.descent");
-    trainingParams.put("training.batch.size", "100");
-    trainingParams.put("tasks", "3");
-    trainingParams.put("modelPath", modelPath);
-
-    try {
-      mlp.train(dataPath, trainingParams);
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-
-  }
-
-}
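
All of the XOR tests above share the same evaluation loop: each training vector stores the two inputs followed by the label, and a prediction counts as correct when it falls on the same side of the 0.5 decision boundary as the label. A condensed sketch of that check, using only the vector calls already shown in the tests (the helper name countXorErrors is illustrative):

    import org.apache.hama.commons.math.DoubleVector;

    // Count misclassified XOR instances; the first two entries of each
    // vector are the inputs, the third is the label.
    static int countXorErrors(SmallMultiLayerPerceptron mlp,
        DoubleVector[] data) throws Exception {
      int errors = 0;
      for (DoubleVector instance : data) {
        DoubleVector features = instance.slice(2); // drop the label
        double expected = instance.toArray()[2];
        double actual = mlp.output(features).toArray()[0];
        if ((expected < 0.5) != (actual < 0.5)) {  // crossed the boundary
          ++errors;
        }
      }
      return errors;
    }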

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/regression/TestLinearRegression.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/regression/TestLinearRegression.java b/ml/src/test/java/org/apache/hama/ml/regression/TestLinearRegression.java
deleted file mode 100644
index 54c473b..0000000
--- a/ml/src/test/java/org/apache/hama/ml/regression/TestLinearRegression.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hama.ml.regression;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleVector;
-import org.junit.Test;
-import org.mortbay.log.Log;
-
-/**
- * Test the functionalities of the linear regression model.
- * 
- */
-public class TestLinearRegression {
-
-  @Test
-  public void testLinearRegressionSimple() {
-    // y = 2.1 * x_1 + 0.7 * x_2 + 0.1 * x_3
-    double[][] instances = { { 1, 1, 1, 2.9 }, { 5, 2, 3, 12.2 },
-        { 2, 5, 8, 8.5 }, { 0.5, 0.1, 0.2, 1.14 }, { 10, 20, 30, 38 },
-        { 0.6, 20, 5, 16.76 } };
-
-    LinearRegression regression = new LinearRegression(instances[0].length - 1);
-    regression.setLearningRate(0.001);
-    regression.setMomemtumWeight(0.1);
-
-    int iterations = 100;
-    for (int i = 0; i < iterations; ++i) {
-      for (int j = 0; j < instances.length; ++j) {
-        regression.trainOnline(new DenseDoubleVector(instances[j]));
-      }
-    }
-
-    double relativeError = 0;
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector test = new DenseDoubleVector(instances[i]);
-      double expected = test.get(test.getDimension() - 1);
-      test = test.slice(test.getDimension() - 1);
-      double actual = regression.getOutput(test).get(0);
-      relativeError += Math.abs((expected - actual) / expected);
-    }
-
-    relativeError /= instances.length;
-    Log.info(String.format("Relative error %f%%\n", relativeError));
-  }
-
-  @Test
-  public void testLinearRegressionOnlineTraining() {
-    // read linear regression data
-    String filepath = "src/test/resources/linear_regression_data.txt";
-    List<double[]> instanceList = new ArrayList<double[]>();
-
-    try {
-      BufferedReader br = new BufferedReader(new FileReader(filepath));
-      String line = null;
-      while ((line = br.readLine()) != null) {
-        if (line.startsWith("#")) { // ignore comments
-          continue;
-        }
-        String[] tokens = line.trim().split(" ");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    // divide dataset into training and testing
-    List<double[]> testInstances = new ArrayList<double[]>();
-    testInstances.addAll(instanceList.subList(instanceList.size() - 20,
-        instanceList.size()));
-    List<double[]> trainingInstances = instanceList.subList(0,
-        instanceList.size() - 20);
-
-    int dimension = instanceList.get(0).length - 1;
-
-    LinearRegression regression = new LinearRegression(dimension);
-    regression.setLearningRate(0.00000005);
-    regression.setMomemtumWeight(0.1);
-    regression.setRegularizationWeight(0.05);
-    int iterations = 2000;
-    for (int i = 0; i < iterations; ++i) {
-      for (double[] trainingInstance : trainingInstances) {
-        regression.trainOnline(new DenseDoubleVector(trainingInstance));
-      }
-    }
-
-    double relativeError = 0.0;
-    // calculate the error on test instance
-    for (double[] testInstance : testInstances) {
-      DoubleVector instance = new DenseDoubleVector(testInstance);
-      double expected = instance.get(instance.getDimension() - 1);
-      instance = instance.slice(instance.getDimension() - 1);
-      double actual = regression.getOutput(instance).get(0);
-      if (expected == 0) {
-        expected = 0.0000001;
-      }
-      relativeError += Math.abs((expected - actual) / expected);
-    }
-    relativeError /= testInstances.size();
-
-    Log.info(String.format("Relative error: %f%%\n", relativeError * 100));
-  }
-
-}
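
The target function in the first test can be checked by hand: for the instance { 1, 1, 1, 2.9 }, 2.1 * 1 + 0.7 * 1 + 0.1 * 1 = 2.9. The metric both tests report is the mean relative error; a standalone sketch of that computation over plain arrays (the helper name is illustrative):

    // Mean relative error over instances whose last column is the target,
    // with the same guard against division by zero as the test above.
    static double meanRelativeError(double[][] instances, double[] predictions) {
      double sum = 0.0;
      for (int i = 0; i < instances.length; ++i) {
        double expected = instances[i][instances[i].length - 1];
        if (expected == 0) {
          expected = 0.0000001;
        }
        sum += Math.abs((expected - predictions[i]) / expected);
      }
      return sum / instances.length;
    }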

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/regression/TestLogisticRegression.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/regression/TestLogisticRegression.java b/ml/src/test/java/org/apache/hama/ml/regression/TestLogisticRegression.java
deleted file mode 100644
index ed76d03..0000000
--- a/ml/src/test/java/org/apache/hama/ml/regression/TestLogisticRegression.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hama.ml.regression;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleVector;
-import org.junit.Test;
-import org.mortbay.log.Log;
-
-/**
- * Test the functionalities of LogisticRegression.
- * 
- */
-public class TestLogisticRegression {
-
-  @Test
-  public void testLogisticRegressionLocal() {
-    // read logistic regression data
-    String filepath = "src/test/resources/logistic_regression_data.txt";
-    List<double[]> instanceList = new ArrayList<double[]>();
-
-    try {
-      BufferedReader br = new BufferedReader(new FileReader(filepath));
-      String line = null;
-      while ((line = br.readLine()) != null) {
-        if (line.startsWith("#")) { // ignore comments
-          continue;
-        }
-        String[] tokens = line.trim().split(",");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    int dimension = instanceList.get(0).length - 1;
-
-    // min-max normalization
-    double[] mins = new double[dimension];
-    double[] maxs = new double[dimension];
-    Arrays.fill(mins, Double.MAX_VALUE);
-    Arrays.fill(maxs, -Double.MAX_VALUE); // Double.MIN_VALUE is positive and would break negative features
-
-    for (double[] instance : instanceList) {
-      for (int i = 0; i < instance.length - 1; ++i) {
-        if (mins[i] > instance[i]) {
-          mins[i] = instance[i];
-        }
-        if (maxs[i] < instance[i]) {
-          maxs[i] = instance[i];
-        }
-      }
-    }
-
-    for (double[] instance : instanceList) {
-      for (int i = 0; i < instance.length - 1; ++i) {
-        double range = maxs[i] - mins[i];
-        if (range != 0) {
-          instance[i] = (instance[i] - mins[i]) / range;
-        }
-      }
-    }
-
-    // divide dataset into training and testing
-    List<double[]> testInstances = new ArrayList<double[]>();
-    testInstances.addAll(instanceList.subList(instanceList.size() - 100,
-        instanceList.size()));
-    List<double[]> trainingInstances = instanceList.subList(0,
-        instanceList.size() - 100);
-
-    LogisticRegression regression = new LogisticRegression(dimension);
-    regression.setLearningRate(0.2);
-    regression.setMomemtumWeight(0.1);
-    regression.setRegularizationWeight(0.1);
-    int iterations = 1000;
-    for (int i = 0; i < iterations; ++i) {
-      for (double[] trainingInstance : trainingInstances) {
-        regression.trainOnline(new DenseDoubleVector(trainingInstance));
-      }
-    }
-
-    double errorRate = 0;
-    // calculate the error on test instance
-    for (double[] testInstance : testInstances) {
-      DoubleVector instance = new DenseDoubleVector(testInstance);
-      double expected = instance.get(instance.getDimension() - 1);
-      DoubleVector features = instance.slice(instance.getDimension() - 1);
-      double actual = regression.getOutput(features).get(0);
-      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
-        ++errorRate;
-      }
-
-    }
-    errorRate /= testInstances.size();
-
-    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
-  }
-
-}
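
The normalization pass above rescales every feature column to [0, 1] in two passes, leaving the label in the last column untouched: first collect per-column ranges, then rescale. A standalone sketch of the same scheme, seeded with -Double.MAX_VALUE so that negative features are also handled (the method name is illustrative):

    import java.util.Arrays;
    import java.util.List;

    // Two-pass min-max normalization over all columns except the label.
    static void minMaxNormalize(List<double[]> instances) {
      int dim = instances.get(0).length - 1;
      double[] mins = new double[dim];
      double[] maxs = new double[dim];
      Arrays.fill(mins, Double.MAX_VALUE);
      Arrays.fill(maxs, -Double.MAX_VALUE);
      for (double[] instance : instances) {   // pass 1: per-column ranges
        for (int i = 0; i < dim; ++i) {
          mins[i] = Math.min(mins[i], instance[i]);
          maxs[i] = Math.max(maxs[i], instance[i]);
        }
      }
      for (double[] instance : instances) {   // pass 2: rescale to [0, 1]
        for (int i = 0; i < dim; ++i) {
          double range = maxs[i] - mins[i];
          if (range != 0) {                   // constant columns stay as-is
            instance[i] = (instance[i] - mins[i]) / range;
          }
        }
      }
    }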


[2/5] hama git commit: HAMA-961: Remove ann package

Posted by ed...@apache.org.
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPTrainer.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPTrainer.java b/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPTrainer.java
deleted file mode 100644
index 8b08136..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPTrainer.java
+++ /dev/null
@@ -1,327 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.BitSet;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hama.bsp.BSPPeer;
-import org.apache.hama.bsp.sync.SyncException;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.ml.ann.NeuralNetworkTrainer;
-
-/**
- * The perceptron trainer for small scale MLP.
- */
-class SmallMLPTrainer extends NeuralNetworkTrainer {
-
-  /* used by the master only, to check whether all slaves finished reading */
-  private BitSet statusSet;
-
-  private int numTrainingInstanceRead = 0;
-  /* Once the reader reaches EOF, the training procedure will be terminated */
-  private boolean terminateTraining = false;
-
-  private SmallMultiLayerPerceptron inMemoryPerceptron;
-
-  private int[] layerSizeArray;
-
-  @Override
-  protected void extraSetup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer) {
-
-    // obtain parameters
-    this.trainingMode = conf.get("training.mode", "minibatch.gradient.descent");
-    // mini-batch by default
-    this.batchSize = conf.getInt("training.batch.size", 100);
-
-    this.statusSet = new BitSet(peer.getConfiguration().getInt("tasks", 1));
-
-    String outputModelPath = conf.get("modelPath");
-    if (outputModelPath == null || outputModelPath.trim().length() == 0) {
-      try {
-        throw new Exception("Please specify output model path.");
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-
-    String modelPath = conf.get("existingModelPath");
-    // build model from scratch
-    if (modelPath == null || modelPath.trim().length() == 0) {
-      double learningRate = Double.parseDouble(conf.get("learningRate"));
-      double regularization = Double.parseDouble(conf.get("regularization"));
-      double momentum = Double.parseDouble(conf.get("momentum"));
-      String squashingFunctionName = conf.get("squashingFunctionName");
-      String costFunctionName = conf.get("costFunctionName");
-      String[] layerSizeArrayStr = conf.get("layerSizeArray").trim().split(" ");
-      this.layerSizeArray = new int[layerSizeArrayStr.length];
-      for (int i = 0; i < this.layerSizeArray.length; ++i) {
-        this.layerSizeArray[i] = Integer.parseInt(layerSizeArrayStr[i]);
-      }
-
-      this.inMemoryPerceptron = new SmallMultiLayerPerceptron(learningRate,
-          regularization, momentum, squashingFunctionName, costFunctionName,
-          layerSizeArray);
-      LOG.info("Training model from scratch.");
-    } else { // read model from existing data
-      this.inMemoryPerceptron = new SmallMultiLayerPerceptron(modelPath);
-      LOG.info("Training with existing model.");
-    }
-
-  }
-
-  @Override
-  protected void extraCleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer) {
-    LOG.info(String.format("Task %d totally read %d records.\n",
-        peer.getPeerIndex(), this.numTrainingInstanceRead));
-    // master write learned model to disk
-    if (peer.getPeerIndex() == 0) {
-      try {
-        LOG.info(String.format("Master write learned model to %s\n",
-            conf.get("modelPath")));
-        this.inMemoryPerceptron.writeModelToFile(conf.get("modelPath"));
-      } catch (IOException e) {
-        System.err.println("Please set a correct model path.");
-      }
-    }
-  }
-
-  @Override
-  public void bsp(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-    LOG.info("Start training...");
-    if (trainingMode.equalsIgnoreCase("minibatch.gradient.descent")) {
-      LOG.info("Training Mode: minibatch.gradient.descent");
-      trainByMinibatch(peer);
-    }
-
-    LOG.info(String.format("Task %d finished.", peer.getPeerIndex()));
-  }
-
-  /**
-   * Train the MLP with mini-batch gradient descent.
-   * 
-   * @param peer
-   * @throws IOException
-   * @throws SyncException
-   * @throws InterruptedException
-   */
-  private void trainByMinibatch(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-
-    int maxIteration = conf.getInt("training.iteration", 1);
-    LOG.info("# of Training Iteration: " + maxIteration);
-
-    for (int i = 0; i < maxIteration; ++i) {
-      if (peer.getPeerIndex() == 0) {
-        LOG.info(String.format("Iteration [%d] begins...", i));
-      }
-      peer.reopenInput();
-      // reset status
-      if (peer.getPeerIndex() == 0) {
-        this.statusSet = new BitSet(peer.getConfiguration().getInt("tasks", 1));
-      }
-      this.terminateTraining = false;
-      peer.sync();
-      while (true) {
-        // each slave task updates weights according to its training data
-        boolean terminate = updateWeights(peer);
-        peer.sync();
-
-        // master merges the updates
-        if (peer.getPeerIndex() == 0) {
-          mergeUpdate(peer);
-        }
-        peer.sync();
-
-        if (terminate) {
-          break;
-        }
-      }
-
-    }
-
-  }
-
-  /**
-   * Merge the updates from the slave tasks.
-   * 
-   * @param peer
-   * @throws IOException
-   */
-  private void mergeUpdate(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-    // initialize the cache
-    DenseDoubleMatrix[] mergedUpdates = this.getZeroWeightMatrices();
-
-    int numOfPartitions = peer.getNumCurrentMessages();
-
-    // aggregates the weights update
-    while (peer.getNumCurrentMessages() > 0) {
-      SmallMLPMessage message = (SmallMLPMessage) peer.getCurrentMessage();
-      if (message.isTerminated()) {
-        this.statusSet.set(message.getOwner());
-      }
-
-      DenseDoubleMatrix[] weightUpdates = message.getWeightUpdatedMatrices();
-      for (int m = 0; m < mergedUpdates.length; ++m) {
-        mergedUpdates[m] = (DenseDoubleMatrix) mergedUpdates[m]
-            .add(weightUpdates[m]);
-      }
-    }
-
-    if (numOfPartitions != 0) {
-      // calculate the global mean (the mean of batches from all slave tasks) of
-      // the weight updates
-      for (int m = 0; m < mergedUpdates.length; ++m) {
-        mergedUpdates[m] = (DenseDoubleMatrix) mergedUpdates[m]
-            .divide(numOfPartitions);
-      }
-
-      // check if all tasks finished reading data
-      if (this.statusSet.cardinality() == conf.getInt("tasks", 1)) {
-        this.terminateTraining = true;
-      }
-
-      // update the weight matrices
-      this.inMemoryPerceptron.updateWeightMatrices(mergedUpdates);
-      this.inMemoryPerceptron.setPrevWeightUpdateMatrices(mergedUpdates);
-    }
-
-    // broadcast updated weight matrices
-    for (String peerName : peer.getAllPeerNames()) {
-      SmallMLPMessage msg = new SmallMLPMessage(peer.getPeerIndex(),
-          this.terminateTraining, this.inMemoryPerceptron.getWeightMatrices(),
-          this.inMemoryPerceptron.getPrevWeightUpdateMatrices());
-      peer.send(peerName, msg);
-    }
-
-  }
-
-  /**
-   * Train the MLP with training data.
-   * 
-   * @param peer
-   * @return Whether training should terminate.
-   * @throws IOException
-   */
-  private boolean updateWeights(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-    // receive update message sent by master
-    if (peer.getNumCurrentMessages() > 0) {
-      SmallMLPMessage message = (SmallMLPMessage) peer.getCurrentMessage();
-      this.terminateTraining = message.isTerminated();
-      // each slave renew its weight matrices
-      this.inMemoryPerceptron.setWeightMatrices(message
-          .getWeightUpdatedMatrices());
-      this.inMemoryPerceptron.setPrevWeightUpdateMatrices(message
-          .getPrevWeightsUpdatedMatrices());
-      if (this.terminateTraining) {
-        return true;
-      }
-    }
-
-    // update weight according to training data
-    DenseDoubleMatrix[] weightUpdates = this.getZeroWeightMatrices();
-
-    int count = 0;
-    LongWritable recordId = new LongWritable();
-    VectorWritable trainingInstance = new VectorWritable();
-    boolean hasMore = false;
-    while (count++ < this.batchSize) {
-      hasMore = peer.readNext(recordId, trainingInstance);
-
-      try {
-        DenseDoubleMatrix[] singleTrainingInstanceUpdates = this.inMemoryPerceptron
-            .trainByInstance(trainingInstance.getVector());
-        // aggregate the updates
-        for (int m = 0; m < weightUpdates.length; ++m) {
-          weightUpdates[m] = (DenseDoubleMatrix) weightUpdates[m]
-              .add(singleTrainingInstanceUpdates[m]);
-        }
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-
-      ++numTrainingInstanceRead;
-      if (!hasMore) {
-        break;
-      }
-    }
-
-    // calculate the local mean (the mean of the local batch) of weight updates
-    for (int m = 0; m < weightUpdates.length; ++m) {
-      weightUpdates[m] = (DenseDoubleMatrix) weightUpdates[m].divide(count);
-    }
-
-    LOG.info(String.format("Task %d has read %d records.", peer.getPeerIndex(),
-        this.numTrainingInstanceRead));
-
-    // send the weight updates to master task
-    SmallMLPMessage message = new SmallMLPMessage(peer.getPeerIndex(),
-        !hasMore, weightUpdates);
-    peer.send(peer.getPeerName(0), message); // send status to master
-
-    return !hasMore;
-  }
-
-  /**
-   * Create zero-filled weight matrices, one for each pair of adjacent layers.
-   */
-  private DenseDoubleMatrix[] getZeroWeightMatrices() {
-    DenseDoubleMatrix[] weightUpdateCache = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
-    // initialize a weight matrix for each layer
-    for (int i = 0; i < weightUpdateCache.length; ++i) {
-      weightUpdateCache[i] = new DenseDoubleMatrix(this.layerSizeArray[i] + 1,
-          this.layerSizeArray[i + 1]);
-    }
-    return weightUpdateCache;
-  }
-
-  /**
-   * Print out the weights.
-   * 
-   * @param mat the weight matrices
-   * @return a string representation of the weight matrices
-   */
-  protected static String weightsToString(DenseDoubleMatrix[] mat) {
-    StringBuilder sb = new StringBuilder();
-
-    for (int i = 0; i < mat.length; ++i) {
-      sb.append(String.format("Matrix [%d]\n", i));
-      double[][] values = mat[i].getValues();
-      for (double[] value : values) {
-        sb.append(Arrays.toString(value));
-        sb.append('\n');
-      }
-      sb.append('\n');
-    }
-    return sb.toString();
-  }
-
-}
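
The heart of the trainer above is the master's merge step: each slave sends the mean weight update of its local batch, and the master averages those means across partitions before applying them. A stripped-down sketch of that aggregation, assuming only the add/divide matrix calls already used in mergeUpdate (the method name averageUpdates is illustrative):

    import org.apache.hama.commons.math.DenseDoubleMatrix;

    // Average per-task weight updates layer by layer: sum the updates of
    // all partitions, then divide by the number of partitions.
    static DenseDoubleMatrix[] averageUpdates(
        DenseDoubleMatrix[][] perTaskUpdates, DenseDoubleMatrix[] zeroes) {
      DenseDoubleMatrix[] merged = zeroes;  // zero matrices, same shape as the weights
      for (DenseDoubleMatrix[] taskUpdate : perTaskUpdates) {
        for (int m = 0; m < merged.length; ++m) {
          merged[m] = (DenseDoubleMatrix) merged[m].add(taskUpdate[m]);
        }
      }
      for (int m = 0; m < merged.length; ++m) {
        merged[m] = (DenseDoubleMatrix) merged[m].divide(perTaskUpdates.length);
      }
      return merged;
    }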

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/SmallMultiLayerPerceptron.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/SmallMultiLayerPerceptron.java b/ml/src/main/java/org/apache/hama/ml/perception/SmallMultiLayerPerceptron.java
deleted file mode 100644
index 1b6d200..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/SmallMultiLayerPerceptron.java
+++ /dev/null
@@ -1,574 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.commons.lang.SerializationUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.bsp.BSPJob;
-import org.apache.hama.commons.io.MatrixWritable;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleFunction;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.util.FeatureTransformer;
-import org.mortbay.log.Log;
-
-/**
- * SmallMultiLayerPerceptron is a kind of multilayer perceptron whose
- * parameters can fit into the memory of a single machine. This kind of model
- * can be trained and used more efficiently than the BigMultiLayerPerceptronBSP,
- * whose parameters are stored distributedly across multiple machines.
- * 
- * In general, it is a multilayer perceptron that consists of one input
- * layer, multiple hidden layers and one output layer.
- * 
- * The number of neurons in the input layer should be consistent with the number
- * of features in the training instance. The number of neurons in the output
- * layer should be consistent with the number of labels.
- */
-public final class SmallMultiLayerPerceptron extends MultiLayerPerceptron
-    implements Writable {
-
-  /* The in-memory weight matrix */
-  private DenseDoubleMatrix[] weightMatrice;
-
-  /* Previous weight updates, used for momentum */
-  private DenseDoubleMatrix[] prevWeightUpdateMatrices;
-
-  /**
-   * @see MultiLayerPerceptron#MultiLayerPerceptron(double, double, double, String, String, int[])
-   */
-  public SmallMultiLayerPerceptron(double learningRate, double regularization,
-      double momentum, String squashingFunctionName, String costFunctionName,
-      int[] layerSizeArray) {
-    super(learningRate, regularization, momentum, squashingFunctionName,
-        costFunctionName, layerSizeArray);
-    initializeWeightMatrix();
-    this.initializePrevWeightUpdateMatrix();
-  }
-
-  /**
-   * @see MultiLayerPerceptron#MultiLayerPerceptron(String)
-   */
-  public SmallMultiLayerPerceptron(String modelPath) {
-    super(modelPath);
-    if (modelPath != null) {
-      try {
-        this.readFromModel();
-        this.initializePrevWeightUpdateMatrix();
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
-  }
-
-  /**
-   * Initialize the weight matrices using a uniform distribution. Each weight
-   * is initialized in the range (-0.5, 0.5).
-   */
-  private void initializeWeightMatrix() {
-    this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1];
-    // each layer contains one bias neuron
-    for (int i = 0; i < this.numberOfLayers - 1; ++i) {
-      // add weights for bias
-      this.weightMatrice[i] = new DenseDoubleMatrix(this.layerSizeArray[i] + 1,
-          this.layerSizeArray[i + 1]);
-
-      this.weightMatrice[i].applyToElements(new DoubleFunction() {
-
-        private final Random rnd = new Random();
-
-        @Override
-        public double apply(double value) {
-          return rnd.nextDouble() - 0.5;
-        }
-
-        @Override
-        public double applyDerivative(double value) {
-          throw new UnsupportedOperationException("Not supported");
-        }
-
-      });
-
-      // int rowCount = this.weightMatrice[i].getRowCount();
-      // int colCount = this.weightMatrice[i].getColumnCount();
-      // for (int row = 0; row < rowCount; ++row) {
-      // for (int col = 0; col < colCount; ++col) {
-      // this.weightMatrice[i].set(row, col, rnd.nextDouble() - 0.5);
-      // }
-      // }
-    }
-  }
-
-  /**
-   * Initialize the momentum weight matrices.
-   */
-  private void initializePrevWeightUpdateMatrix() {
-    this.prevWeightUpdateMatrices = new DenseDoubleMatrix[this.numberOfLayers - 1];
-    for (int i = 0; i < this.prevWeightUpdateMatrices.length; ++i) {
-      int row = this.layerSizeArray[i] + 1;
-      int col = this.layerSizeArray[i + 1];
-      this.prevWeightUpdateMatrices[i] = new DenseDoubleMatrix(row, col);
-    }
-  }
-
-  @Override
-  /**
-   * {@inheritDoc}
-   * The model meta-data is stored in memory.
-   */
-  public DoubleVector outputWrapper(DoubleVector featureVector) {
-    List<double[]> outputCache = this.outputInternal(featureVector);
-    // the output of the last layer is the output of the MLP
-    return new DenseDoubleVector(outputCache.get(outputCache.size() - 1));
-  }
-
-  private List<double[]> outputInternal(DoubleVector featureVector) {
-    // store the output of the hidden layers and output layer, each array store
-    // one layer
-    List<double[]> outputCache = new ArrayList<double[]>();
-
-    // start from the first hidden layer
-    double[] intermediateResults = new double[this.layerSizeArray[0] + 1];
-    if (intermediateResults.length - 1 != featureVector.getDimension()) {
-      throw new IllegalStateException(
-          "Input feature dimension incorrect! The dimension of input layer is "
-              + this.layerSizeArray[0]
-              + ", but the dimension of input feature is "
-              + featureVector.getDimension());
-    }
-
-    // fill with input features
-    intermediateResults[0] = 1.0; // bias
-
-    // transform the original features to another space
-    featureVector = this.featureTransformer.transform(featureVector);
-
-    for (int i = 0; i < featureVector.getDimension(); ++i) {
-      intermediateResults[i + 1] = featureVector.get(i);
-    }
-    outputCache.add(intermediateResults);
-
-    // forward the intermediate results to next layer
-    for (int fromLayer = 0; fromLayer < this.numberOfLayers - 1; ++fromLayer) {
-      intermediateResults = forward(fromLayer, intermediateResults);
-      outputCache.add(intermediateResults);
-    }
-
-    return outputCache;
-  }
-
-  /**
-   * Calculate the intermediate results of layer fromLayer + 1.
-   * 
-   * @param fromLayer The index of the layer from which the intermediate
-   *          results are forwarded.
-   * @param intermediateResult The output of layer fromLayer, including the
-   *          bias neuron.
-   * @return The intermediate results of layer fromLayer + 1.
-   */
-  private double[] forward(int fromLayer, double[] intermediateResult) {
-    int toLayer = fromLayer + 1;
-    double[] results = null;
-    int offset = 0;
-
-    if (toLayer < this.layerSizeArray.length - 1) { // add bias if it is not
-                                                    // output layer
-      results = new double[this.layerSizeArray[toLayer] + 1];
-      offset = 1;
-      results[0] = 1.0; // the bias
-    } else {
-      results = new double[this.layerSizeArray[toLayer]]; // no bias
-    }
-
-    for (int neuronIdx = 0; neuronIdx < this.layerSizeArray[toLayer]; ++neuronIdx) {
-      // aggregate the results from previous layer
-      for (int prevNeuronIdx = 0; prevNeuronIdx < this.layerSizeArray[fromLayer] + 1; ++prevNeuronIdx) {
-        results[neuronIdx + offset] += this.weightMatrice[fromLayer].get(
-            prevNeuronIdx, neuronIdx) * intermediateResult[prevNeuronIdx];
-      }
-      // calculate via squashing function
-      results[neuronIdx + offset] = this.squashingFunction
-          .apply(results[neuronIdx + offset]);
-    }
-
-    return results;
-  }
-
-  /**
-   * Get the updated weights using one training instance.
-   * 
-   * @param trainingInstance The concatenation of the feature vector and the
-   *          class label vector.
-   * @return The update of each weight.
-   * @throws Exception
-   */
-  DenseDoubleMatrix[] trainByInstance(DoubleVector trainingInstance)
-      throws Exception {
-    // initialize weight update matrices
-    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
-    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
-      weightUpdateMatrices[m] = new DenseDoubleMatrix(
-          this.layerSizeArray[m] + 1, this.layerSizeArray[m + 1]);
-    }
-
-    if (trainingInstance == null) {
-      return weightUpdateMatrices;
-    }
-
-    // transform the features (exclude the labels) to new space
-    double[] trainingVec = trainingInstance.toArray();
-    double[] trainingFeature = this.featureTransformer.transform(
-        trainingInstance.sliceUnsafe(0, this.layerSizeArray[0] - 1)).toArray();
-    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
-        this.layerSizeArray[0], trainingVec.length);
-
-    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
-    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);
-
-    // calculate the delta of output layer
-    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
-    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
-    double[] lastHiddenLayerOutput = outputCache.get(outputCache.size() - 2);
-
-    DenseDoubleMatrix prevWeightUpdateMatrix = this.prevWeightUpdateMatrices[this.prevWeightUpdateMatrices.length - 1];
-    for (int j = 0; j < delta.length; ++j) {
-      delta[j] = this.costFunction.applyDerivative(trainingLabels[j],
-          outputLayerOutput[j]);
-      // add regularization term
-      if (this.regularization != 0.0) {
-        double derivativeRegularization = 0.0;
-        DenseDoubleMatrix weightMatrix = this.weightMatrice[this.weightMatrice.length - 1];
-        for (int k = 0; k < this.layerSizeArray[this.layerSizeArray.length - 1]; ++k) {
-          derivativeRegularization += weightMatrix.get(k, j);
-        }
-        derivativeRegularization /= this.layerSizeArray[this.layerSizeArray.length - 1];
-        delta[j] += this.regularization * derivativeRegularization;
-      }
-
-      delta[j] *= this.squashingFunction.applyDerivative(outputLayerOutput[j]);
-
-      // calculate the weight update matrix between the last hidden layer and
-      // the output layer
-      for (int i = 0; i < this.layerSizeArray[this.layerSizeArray.length - 2] + 1; ++i) {
-        double updatedValue = -this.learningRate * delta[j]
-            * lastHiddenLayerOutput[i];
-        // add momentum
-        updatedValue += this.momentum * prevWeightUpdateMatrix.get(i, j);
-        weightUpdateMatrices[weightUpdateMatrices.length - 1].set(i, j,
-            updatedValue);
-      }
-    }
-
-    // calculate the delta for each hidden layer through back-propagation
-    for (int l = this.layerSizeArray.length - 2; l >= 1; --l) {
-      delta = backpropagate(l, delta, outputCache, weightUpdateMatrices);
-    }
-
-    return weightUpdateMatrices;
-  }
-
-  /**
-   * Back-propagate the errors from the next layer to the previous layer. The
-   * weight update information is stored in weightUpdateMatrices, and the
-   * delta of the previous layer is returned.
-   * 
-   * @param curLayerIdx The layer index of the current layer.
-   * @param nextLayerDelta The delta of the next layer.
-   * @param outputCache The cache of the output of all the layers.
-   * @param weightUpdateMatrices The weight update matrices.
-   * @return The delta of the previous layer, to be used in the next iteration
-   *         of back-propagation.
-   */
-  private double[] backpropagate(int curLayerIdx, double[] nextLayerDelta,
-      List<double[]> outputCache, DenseDoubleMatrix[] weightUpdateMatrices) {
-    int prevLayerIdx = curLayerIdx - 1;
-    double[] delta = new double[this.layerSizeArray[curLayerIdx]];
-    double[] curLayerOutput = outputCache.get(curLayerIdx);
-    double[] prevLayerOutput = outputCache.get(prevLayerIdx);
-
-    // DenseDoubleMatrix prevWeightUpdateMatrix = this.prevWeightUpdateMatrices[curLayerIdx - 1];
-    // for each neuron j in nextLayer, calculate the delta
-    for (int j = 0; j < delta.length; ++j) {
-      // aggregate delta from next layer
-      for (int k = 0; k < nextLayerDelta.length; ++k) {
-        double weight = this.weightMatrice[curLayerIdx].get(j, k);
-        delta[j] += weight * nextLayerDelta[k];
-      }
-      delta[j] *= this.squashingFunction.applyDerivative(curLayerOutput[j + 1]);
-
-      // calculate the weight update matrix between the previous layer and the
-      // current layer
-      for (int i = 0; i < weightUpdateMatrices[prevLayerIdx].getRowCount(); ++i) {
-        double updatedValue = -this.learningRate * delta[j]
-            * prevLayerOutput[i];
-        // add momentum (currently disabled)
-        // updatedValue += this.momentum * prevWeightUpdateMatrix.get(i, j);
-        weightUpdateMatrices[prevLayerIdx].set(i, j, updatedValue);
-      }
-    }
-
-    return delta;
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public void train(Path dataInputPath, Map<String, String> trainingParams)
-      throws IOException, InterruptedException, ClassNotFoundException {
-    // create the BSP training job
-    Configuration conf = new Configuration();
-    for (Map.Entry<String, String> entry : trainingParams.entrySet()) {
-      conf.set(entry.getKey(), entry.getValue());
-    }
-
-    // put model related parameters
-    if (modelPath == null || modelPath.trim().length() == 0) { // build model
-                                                               // from scratch
-      conf.set("MLPType", this.MLPType);
-      conf.set("learningRate", "" + this.learningRate);
-      conf.set("regularization", "" + this.regularization);
-      conf.set("momentum", "" + this.momentum);
-      conf.set("squashingFunctionName", this.squashingFunctionName);
-      conf.set("costFunctionName", this.costFunctionName);
-      StringBuilder layerSizeArraySb = new StringBuilder();
-      for (int layerSize : this.layerSizeArray) {
-        layerSizeArraySb.append(layerSize);
-        layerSizeArraySb.append(' ');
-      }
-      conf.set("layerSizeArray", layerSizeArraySb.toString());
-    }
-
-    HamaConfiguration hamaConf = new HamaConfiguration(conf);
-
-    BSPJob job = new BSPJob(hamaConf, SmallMLPTrainer.class);
-    job.setJobName("Small scale MLP training");
-    job.setJarByClass(SmallMLPTrainer.class);
-    job.setBspClass(SmallMLPTrainer.class);
-    job.setInputPath(dataInputPath);
-    job.setInputFormat(org.apache.hama.bsp.SequenceFileInputFormat.class);
-    job.setInputKeyClass(LongWritable.class);
-    job.setInputValueClass(VectorWritable.class);
-    job.setOutputKeyClass(NullWritable.class);
-    job.setOutputValueClass(NullWritable.class);
-    job.setOutputFormat(org.apache.hama.bsp.NullOutputFormat.class);
-
-    int numTasks = conf.getInt("tasks", 1);
-    job.setNumBspTask(numTasks);
-    job.waitForCompletion(true);
-
-    // reload learned model
-    Log.info(String.format("Reload model from %s.",
-        trainingParams.get("modelPath")));
-    this.modelPath = trainingParams.get("modelPath");
-    this.readFromModel();
-  }
-
-  @SuppressWarnings("rawtypes")
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    this.MLPType = WritableUtils.readString(input);
-    this.learningRate = input.readDouble();
-    this.regularization = input.readDouble();
-    this.momentum = input.readDouble();
-    this.numberOfLayers = input.readInt();
-    this.squashingFunctionName = WritableUtils.readString(input);
-    this.costFunctionName = WritableUtils.readString(input);
-
-    this.squashingFunction = FunctionFactory
-        .createDoubleFunction(this.squashingFunctionName);
-    this.costFunction = FunctionFactory
-        .createDoubleDoubleFunction(this.costFunctionName);
-
-    // read the number of neurons for each layer
-    this.layerSizeArray = new int[this.numberOfLayers];
-    for (int i = 0; i < numberOfLayers; ++i) {
-      this.layerSizeArray[i] = input.readInt();
-    }
-    this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1];
-    for (int i = 0; i < numberOfLayers - 1; ++i) {
-      this.weightMatrice[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
-    }
-
-    // read feature transformer
-    int bytesLen = input.readInt();
-    byte[] featureTransformerBytes = new byte[bytesLen];
-    for (int i = 0; i < featureTransformerBytes.length; ++i) {
-      featureTransformerBytes[i] = input.readByte();
-    }
-    Class featureTransformerCls = (Class) SerializationUtils
-        .deserialize(featureTransformerBytes);
-    Constructor constructor = featureTransformerCls.getConstructors()[0];
-    try {
-      this.featureTransformer = (FeatureTransformer) constructor
-          .newInstance(new Object[] {});
-    } catch (InstantiationException e) {
-      e.printStackTrace();
-    } catch (IllegalAccessException e) {
-      e.printStackTrace();
-    } catch (IllegalArgumentException e) {
-      e.printStackTrace();
-    } catch (InvocationTargetException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    WritableUtils.writeString(output, MLPType);
-    output.writeDouble(learningRate);
-    output.writeDouble(regularization);
-    output.writeDouble(momentum);
-    output.writeInt(numberOfLayers);
-    WritableUtils.writeString(output, squashingFunctionName);
-    WritableUtils.writeString(output, costFunctionName);
-
-    // write the number of neurons for each layer
-    for (int i = 0; i < this.numberOfLayers; ++i) {
-      output.writeInt(this.layerSizeArray[i]);
-    }
-    for (int i = 0; i < numberOfLayers - 1; ++i) {
-      MatrixWritable matrixWritable = new MatrixWritable(this.weightMatrice[i]);
-      matrixWritable.write(output);
-    }
-
-    // serialize the feature transformer
-    Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer
-        .getClass();
-    byte[] featureTransformerBytes = SerializationUtils
-        .serialize(featureTransformerCls);
-    output.writeInt(featureTransformerBytes.length);
-    output.write(featureTransformerBytes);
-  }
-
-  /**
-   * Read the model meta-data from the specified location.
-   * 
-   * @throws IOException
-   */
-  @Override
-  protected void readFromModel() throws IOException {
-    Configuration conf = new Configuration();
-    try {
-      URI uri = new URI(modelPath);
-      FileSystem fs = FileSystem.get(uri, conf);
-      FSDataInputStream is = new FSDataInputStream(fs.open(new Path(modelPath)));
-      this.readFields(is);
-      if (!this.MLPType.equals(this.getClass().getName())) {
-        throw new IllegalStateException(String.format(
-            "Model type incorrect, cannot load model '%s' for '%s'.",
-            this.MLPType, this.getClass().getName()));
-      }
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Write the model to file.
-   * 
-   * @throws IOException
-   */
-  @Override
-  public void writeModelToFile(String modelPath) throws IOException {
-    Configuration conf = new Configuration();
-    FileSystem fs = FileSystem.get(conf);
-    FSDataOutputStream stream = fs.create(new Path(modelPath), true);
-    this.write(stream);
-    stream.close();
-  }
-
-  DenseDoubleMatrix[] getWeightMatrices() {
-    return this.weightMatrice;
-  }
-
-  DenseDoubleMatrix[] getPrevWeightUpdateMatrices() {
-    return this.prevWeightUpdateMatrices;
-  }
-
-  void setWeightMatrices(DenseDoubleMatrix[] newMatrices) {
-    this.weightMatrice = newMatrices;
-  }
-
-  void setPrevWeightUpdateMatrices(
-      DenseDoubleMatrix[] newPrevWeightUpdateMatrices) {
-    this.prevWeightUpdateMatrices = newPrevWeightUpdateMatrices;
-  }
-
-  /**
-   * Update the weight matrices with given updates.
-   * 
-   * @param updateMatrices The weight updates in matrix format.
-   */
-  void updateWeightMatrices(DenseDoubleMatrix[] updateMatrices) {
-    for (int m = 0; m < this.weightMatrice.length; ++m) {
-      this.weightMatrice[m] = (DenseDoubleMatrix) this.weightMatrice[m]
-          .add(updateMatrices[m]);
-    }
-  }
-
-  /**
-   * Format the weights as a human-readable string.
-   * 
-   * @param mat The weight matrices.
-   * @return The string representation of the weights.
-   */
-  static String weightsToString(DenseDoubleMatrix[] mat) {
-    StringBuilder sb = new StringBuilder();
-
-    for (int i = 0; i < mat.length; ++i) {
-      sb.append(String.format("Matrix [%d]\n", i));
-      double[][] values = mat[i].getValues();
-      for (double[] value : values) {
-        sb.append(Arrays.toString(value));
-        sb.append('\n');
-      }
-      sb.append('\n');
-    }
-    return sb.toString();
-  }
-
-  @Override
-  protected String getTypeName() {
-    return this.getClass().getName();
-  }
-
-}
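
For reference, the forward step removed above reduces to a weighted sum over
the previous layer's outputs (index 0 being the bias neuron) followed by the
squashing function. Below is a minimal standalone sketch of that computation;
the class name, array shapes, and the choice of a sigmoid are illustrative and
not part of the removed code:

    public class ForwardPassSketch {

      // Sigmoid squashing function (illustrative; the removed MLP lets the
      // caller choose the squashing function).
      static double sigmoid(double z) {
        return 1.0 / (1.0 + Math.exp(-z));
      }

      // weights[i][j] is the weight from neuron i of the previous layer
      // (index 0 is the bias neuron) to neuron j of the next layer.
      static double[] forward(double[][] weights, double[] prevWithBias) {
        double[] out = new double[weights[0].length];
        for (int j = 0; j < out.length; ++j) {
          double sum = 0.0;
          for (int i = 0; i < prevWithBias.length; ++i) {
            sum += weights[i][j] * prevWithBias[i];
          }
          out[j] = sigmoid(sum);
        }
        return out;
      }

      public static void main(String[] args) {
        double[] input = { 1.0, 0.0, 1.0 }; // bias followed by two features
        double[][] weights = { { 0.5, 0.5 }, { 0.5, 0.5 }, { 0.5, 0.5 } };
        System.out.println(java.util.Arrays.toString(forward(weights, input)));
      }
    }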

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/regression/LinearRegression.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/regression/LinearRegression.java b/ml/src/main/java/org/apache/hama/ml/regression/LinearRegression.java
deleted file mode 100644
index 50e3b08..0000000
--- a/ml/src/main/java/org/apache/hama/ml/regression/LinearRegression.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hama.ml.regression;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.ann.SmallLayeredNeuralNetwork;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-/**
- * Linear regression model. It can be used for numeric regression or prediction.
- * 
- */
-public class LinearRegression {
-
-  /* Internal model */
-  private final SmallLayeredNeuralNetwork ann;
-
-  public LinearRegression(int dimension) {
-    ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.addLayer(1, true,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-  }
-
-  public LinearRegression(String modelPath) {
-    ann = new SmallLayeredNeuralNetwork(modelPath);
-  }
-
-  /**
-   * Set the learning rate; a value in (0, 0.01] is recommended. Note that
-   * linear regression can easily diverge if the learning rate is not small
-   * enough.
-   * 
-   * @param learningRate
-   */
-  public LinearRegression setLearningRate(double learningRate) {
-    ann.setLearningRate(learningRate);
-    return this;
-  }
-
-  /**
-   * Get the learning rate.
-   */
-  public double getLearningRate() {
-    return ann.getLearningRate();
-  }
-
-  /**
-   * Set the momentum weight; a value in [0, 1.0] is recommended. Too large a
-   * momentum weight may make the model hard to converge.
-   * 
-   * @param momemtumWeight
-   */
-  public LinearRegression setMomemtumWeight(double momemtumWeight) {
-    ann.setMomemtumWeight(momemtumWeight);
-    return this;
-  }
-
-  /**
-   * Get the momentum weight.
-   * 
-   * @return the momentum weight value.
-   */
-  public double getMomemtumWeight() {
-    return ann.getMomemtumWeight();
-  }
-
-  /**
-   * Set the regularization weight; a value in [0, 0.1] is recommended. Too
-   * large a regularization weight will mislead the model.
-   * 
-   * @param regularizationWeight
-   */
-  public LinearRegression setRegularizationWeight(double regularizationWeight) {
-    ann.setRegularizationWeight(regularizationWeight);
-    return this;
-  }
-
-  /**
-   * Get the weight of regularization.
-   * 
-   * @return the regularization weight value.
-   */
-  public double getRegularizationWeight() {
-    return ann.getRegularizationWeight();
-  }
-
-  /**
-   * Train the linear regression model with one instance. It is HIGHLY
-   * RECOMMENDED to normalize the data first.
-   * 
-   * @param trainingInstance
-   */
-  public void trainOnline(DoubleVector trainingInstance) {
-    DoubleMatrix[] updates = ann.trainByInstance(trainingInstance);
-    ann.updateWeightMatrices(updates);
-  }
-
-  /**
-   * Train the model with given data. It is HIGHLY RECOMMENDED to normalize the
-   * data first.
-   * 
-   * @param dataInputPath The file path that contains the training instance.
-   * @param trainingParams The training parameters.
-   * @throws IOException
-   * @throws InterruptedException
-   * @throws ClassNotFoundException
-   */
-  public void train(Path dataInputPath, Map<String, String> trainingParams) {
-    ann.train(dataInputPath, trainingParams);
-  }
-
-  /**
-   * Get the output for the given input instance.
-   * 
-   * @param instance
-   * @return a new vector with the result of the operation.
-   */
-  public DoubleVector getOutput(DoubleVector instance) {
-    return ann.getOutput(instance);
-  }
-
-  /**
-   * Set the path to store the model. Note that this only sets the path; it
-   * does not save the model. Call writeModelToFile to save the model.
-   * 
-   * @param modelPath
-   */
-  public void setModelPath(String modelPath) {
-    ann.setModelPath(modelPath);
-  }
-
-  /**
-   * Save the model to specified model path.
-   */
-  public void writeModelToFile() {
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Get the weights of the model.
-   * 
-   * @return a new vector with the weights of the model.
-   */
-  public DoubleVector getWeights() {
-    return ann.getWeightsByLayer(0).getRowVector(0);
-  }
-  
-  /**
-   * Set the feature transformer.
-   * @param featureTransformer
-   */
-  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
-    this.ann.setFeatureTransformer(featureTransformer);
-  }
-
-}
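
For context, a typical use of the removed LinearRegression class looked like
the sketch below. The hyper-parameter values and class name are illustrative;
the API calls (including the "Momemtum" spelling) are as defined in the
deleted file above:

    import org.apache.hama.commons.math.DenseDoubleVector;
    import org.apache.hama.ml.regression.LinearRegression;

    public class LinearRegressionUsageSketch {
      public static void main(String[] args) {
        // Three input features; hyper-parameter values are illustrative.
        LinearRegression regression = new LinearRegression(3)
            .setLearningRate(0.005)         // recommended range (0, 0.01]
            .setMomemtumWeight(0.2)         // the removed API spells it "Momemtum"
            .setRegularizationWeight(0.05);

        // One normalized training instance: features followed by the target.
        regression.trainOnline(new DenseDoubleVector(
            new double[] { 0.2, 0.5, 0.1, 0.7 }));

        // Predict on a new feature vector.
        System.out.println(regression.getOutput(
            new DenseDoubleVector(new double[] { 0.3, 0.4, 0.2 })));
      }
    }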

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/regression/LogisticRegression.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/regression/LogisticRegression.java b/ml/src/main/java/org/apache/hama/ml/regression/LogisticRegression.java
deleted file mode 100644
index dd990c7..0000000
--- a/ml/src/main/java/org/apache/hama/ml/regression/LogisticRegression.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hama.ml.regression;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.ann.SmallLayeredNeuralNetwork;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * The logistic regression model. It can be used for binary (2-class)
- * classification.
- * 
- */
-public class LogisticRegression {
-  
-  private final SmallLayeredNeuralNetwork ann;
-  
-  public LogisticRegression(int dimension) {
-    this.ann = new SmallLayeredNeuralNetwork();
-    this.ann.addLayer(dimension, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    this.ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    this.ann.setCostFunction(FunctionFactory.createDoubleDoubleFunction("CrossEntropy"));
-  }
-  
-  public LogisticRegression(String modelPath) {
-    this.ann = new SmallLayeredNeuralNetwork(modelPath);
-  }
-  
-  /**
-   * Set the learning rate; a value in (0, 0.01] is recommended. Note that
-   * logistic regression can easily diverge if the learning rate is not small
-   * enough.
-   * 
-   * @param learningRate
-   */
-  public LogisticRegression setLearningRate(double learningRate) {
-    ann.setLearningRate(learningRate);
-    return this;
-  }
-
-  /**
-   * Get the learning rate.
-   */
-  public double getLearningRate() {
-    return ann.getLearningRate();
-  }
-
-  /**
-   * Set the momentum weight; a value in [0, 1.0] is recommended. Too large a
-   * momentum weight may make the model hard to converge.
-   * 
-   * @param momemtumWeight
-   */
-  public LogisticRegression setMomemtumWeight(double momemtumWeight) {
-    ann.setMomemtumWeight(momemtumWeight);
-    return this;
-  }
-
-  /**
-   * Get the momentum weight.
-   * 
-   * @return the momentum weight value.
-   */
-  public double getMomemtumWeight() {
-    return ann.getMomemtumWeight();
-  }
-
-  /**
-   * Set the regularization weight; a value in [0, 0.1] is recommended. Too
-   * large a regularization weight will mislead the model.
-   * 
-   * @param regularizationWeight
-   */
-  public LogisticRegression setRegularizationWeight(double regularizationWeight) {
-    ann.setRegularizationWeight(regularizationWeight);
-    return this;
-  }
-
-  /**
-   * Get the weight of regularization.
-   * 
-   * @return the regularization weight value.
-   */
-  public double getRegularizationWeight() {
-    return ann.getRegularizationWeight();
-  }
-
-  /**
-   * Train the logistic regression model with one instance. It is HIGHLY
-   * RECOMMENDED to normalize the data first.
-   * 
-   * @param trainingInstance
-   */
-  public void trainOnline(DoubleVector trainingInstance) {
-    ann.trainOnline(trainingInstance);
-  }
-
-  /**
-   * Train the model with given data. It is HIGHLY RECOMMENDED to normalize the
-   * data first.
-   * 
-   * @param dataInputPath The file path that contains the training instance.
-   * @param trainingParams The training parameters.
-   * @throws IOException
-   * @throws InterruptedException
-   * @throws ClassNotFoundException
-   */
-  public void train(Path dataInputPath, Map<String, String> trainingParams) {
-    ann.train(dataInputPath, trainingParams);
-  }
-
-  /**
-   * Get the output for the given input instance.
-   * 
-   * @param instance
-   * @return a new vector with the result of the operation.
-   */
-  public DoubleVector getOutput(DoubleVector instance) {
-    return ann.getOutput(instance);
-  }
-
-  /**
-   * Set the path to store the model. Note that this only sets the path; it
-   * does not save the model. Call writeModelToFile to save the model.
-   * 
-   * @param modelPath
-   */
-  public void setModelPath(String modelPath) {
-    ann.setModelPath(modelPath);
-  }
-
-  /**
-   * Save the model to specified model path.
-   */
-  public void writeModelToFile() {
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Get the weights of the model.
-   * 
-   * @return a new vector with the weights of the model.
-   */
-  public DoubleVector getWeights() {
-    return ann.getWeightsByLayer(0).getRowVector(0);
-  }
-
-  /**
-   * Set the feature transformer.
-   * @param featureTransformer
-   */
-  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
-    this.ann.setFeatureTransformer(featureTransformer);
-  }
-}
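
Similarly, a minimal usage sketch for the removed LogisticRegression class,
against the API defined in the deleted file above (class name and values are
illustrative):

    import org.apache.hama.commons.math.DenseDoubleVector;
    import org.apache.hama.commons.math.DoubleVector;
    import org.apache.hama.ml.regression.LogisticRegression;

    public class LogisticRegressionUsageSketch {
      public static void main(String[] args) {
        LogisticRegression classifier = new LogisticRegression(2)
            .setLearningRate(0.01)
            .setMomemtumWeight(0.1);

        // A normalized training instance: two features plus a {0, 1} label.
        classifier.trainOnline(new DenseDoubleVector(new double[] { 0.9, 0.1, 1 }));

        // The sigmoid output is a probability; threshold at 0.5 to classify.
        DoubleVector out = classifier.getOutput(
            new DenseDoubleVector(new double[] { 0.8, 0.2 }));
        System.out.println(out.get(0) >= 0.5 ? "class 1" : "class 0");
      }
    }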

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/ann/TestAutoEncoder.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/ann/TestAutoEncoder.java b/ml/src/test/java/org/apache/hama/ml/ann/TestAutoEncoder.java
deleted file mode 100644
index 0077cb0..0000000
--- a/ml/src/test/java/org/apache/hama/ml/ann/TestAutoEncoder.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.ml.MLTestBase;
-import org.junit.Test;
-import org.mortbay.log.Log;
-
-/**
- * Test the functionality of {@link AutoEncoder}.
- * 
- */
-public class TestAutoEncoder extends MLTestBase {
-
-  @Test
-  public void testAutoEncoderSimple() {
-    double[][] instances = { { 0, 0, 0, 1 }, { 0, 0, 1, 0 }, { 0, 1, 0, 0 },
-        { 0, 0, 0, 0 } };
-    AutoEncoder encoder = new AutoEncoder(4, 2);
-    encoder.setLearningRate(0.5);
-    encoder.setMomemtumWeight(0.2);
-    
-    int maxIteration = 2000;
-    Random rnd = new Random();
-    for (int iteration = 0; iteration < maxIteration; ++iteration) {
-      for (int i = 0; i < instances.length; ++i) {
-        encoder.trainOnline(new DenseDoubleVector(instances[rnd.nextInt(instances.length)]));
-      }
-    }
-
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector encodeVec = encoder.encode(new DenseDoubleVector(
-          instances[i]));
-      DoubleVector decodeVec = encoder.decode(encodeVec);
-      for (int d = 0; d < instances[i].length; ++d) {
-        assertEquals(instances[i][d], decodeVec.get(d), 0.1);
-      }
-    }
-
-  }
-  
-  @Test
-  public void testAutoEncoderSwissRollDataset() {
-    List<double[]> instanceList = new ArrayList<double[]>();
-    try {
-      BufferedReader br = new BufferedReader(new FileReader("src/test/resources/dimensional_reduction.txt"));
-      String line = null;
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.split("\t");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < instance.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-      // normalize instances
-      zeroOneNormalization(instanceList, instanceList.get(0).length);
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (NumberFormatException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    
-    List<DoubleVector> vecInstanceList = new ArrayList<DoubleVector>();
-    for (double[] instance : instanceList) {
-      vecInstanceList.add(new DenseDoubleVector(instance));
-    }
-    AutoEncoder encoder = new AutoEncoder(3, 2);
-    encoder.setLearningRate(0.05);
-    encoder.setMomemtumWeight(0.1);
-    int maxIteration = 2000;
-    for (int iteration = 0; iteration < maxIteration; ++iteration) {
-      for (DoubleVector vector : vecInstanceList) {
-        encoder.trainOnline(vector);
-      }
-    }
-
-    double errorInstance = 0;
-    for (DoubleVector vector : vecInstanceList) {
-      DoubleVector decoded = encoder.getOutput(vector);
-      DoubleVector diff = vector.subtract(decoded);
-      double error = diff.dot(diff);
-      if (error > 0.1) {
-        ++errorInstance;
-      }
-    }
-    Log.info(String.format("Autoecoder error rate: %f%%\n", errorInstance * 100 / vecInstanceList.size()));
-    
-  }
-  
-  @Test
-  public void testAutoEncoderSwissRollDatasetDistributed() {
-    String strDataPath = "/tmp/dimensional_reduction.txt";
-    Path path = new Path(strDataPath);
-    List<double[]> instanceList = new ArrayList<double[]>();
-    try {
-      Configuration conf = new Configuration();
-      FileSystem fs = FileSystem.get(new URI(strDataPath), conf);
-      if (fs.exists(path)) {
-        fs.delete(path, true);
-      }
-      
-      String line = null;
-      BufferedReader br = new BufferedReader(new FileReader("src/test/resources/dimensional_reduction.txt"));
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.split("\t");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < instance.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-      // normalize instances
-      zeroOneNormalization(instanceList, instanceList.get(0).length);
-      
-      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
-      for (int i = 0; i < instanceList.size(); ++i) {
-        DoubleVector vector = new DenseDoubleVector(instanceList.get(i));
-        writer.append(new LongWritable(i), new VectorWritable(vector));
-      }
-      
-      writer.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-    
-    AutoEncoder encoder = new AutoEncoder(3, 2);
-    String modelPath = "/tmp/autoencoder-modelpath";
-    encoder.setModelPath(modelPath);
-    Map<String, String> trainingParams = new HashMap<String, String>();
-    encoder.setLearningRate(0.5);
-    trainingParams.put("tasks", "5");
-    trainingParams.put("training.max.iterations", "3000");
-    trainingParams.put("training.batch.size", "200");
-    encoder.train(path, trainingParams);
-    
-    double errorInstance = 0;
-    for (double[] instance : instanceList) {
-      DoubleVector vector = new DenseDoubleVector(instance);
-      DoubleVector decoded = encoder.getOutput(vector);
-      DoubleVector diff = vector.subtract(decoded);
-      double error = diff.dot(diff);
-      if (error > 0.1) {
-        ++errorInstance;
-      }
-    }
-    Log.info(String.format("Autoecoder error rate: %f%%\n", errorInstance * 100 / instanceList.size()));
-  }
-
-}
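
The tests above rely on zeroOneNormalization(...) from MLTestBase, which is
not shown in this diff. Assuming it performs standard per-column min-max
scaling, a plausible standalone equivalent is the following sketch (class and
method names are illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ZeroOneNormalizationSketch {

      // Min-max scale the first dim columns of each instance into [0, 1]:
      // x' = (x - min) / (max - min); constant columns are mapped to 0.
      static void zeroOneNormalize(List<double[]> instances, int dim) {
        for (int col = 0; col < dim; ++col) {
          double min = Double.POSITIVE_INFINITY;
          double max = Double.NEGATIVE_INFINITY;
          for (double[] inst : instances) {
            min = Math.min(min, inst[col]);
            max = Math.max(max, inst[col]);
          }
          double range = max - min;
          for (double[] inst : instances) {
            inst[col] = range == 0 ? 0 : (inst[col] - min) / range;
          }
        }
      }

      public static void main(String[] args) {
        List<double[]> data = new ArrayList<double[]>(Arrays.asList(
            new double[] { 1, 10 }, new double[] { 3, 20 }, new double[] { 5, 40 }));
        zeroOneNormalize(data, 2);
        for (double[] d : data) {
          System.out.println(Arrays.toString(d));
        }
      }
    }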

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetwork.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetwork.java b/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetwork.java
deleted file mode 100644
index 8ad88af..0000000
--- a/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetwork.java
+++ /dev/null
@@ -1,643 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.MLTestBase;
-import org.apache.hama.ml.ann.AbstractLayeredNeuralNetwork.LearningStyle;
-import org.apache.hama.ml.ann.AbstractLayeredNeuralNetwork.TrainingMethod;
-import org.apache.hama.ml.util.DefaultFeatureTransformer;
-import org.apache.hama.ml.util.FeatureTransformer;
-import org.junit.Test;
-import org.mortbay.log.Log;
-
-/**
- * Test the functionality of SmallLayeredNeuralNetwork.
- * 
- */
-public class TestSmallLayeredNeuralNetwork extends MLTestBase {
-
-  @Test
-  public void testReadWrite() {
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(2, false,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.addLayer(5, false,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.addLayer(1, true,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    double learningRate = 0.2;
-    ann.setLearningRate(learningRate);
-    double momentumWeight = 0.5;
-    ann.setMomemtumWeight(momentumWeight);
-    double regularizationWeight = 0.05;
-    ann.setRegularizationWeight(regularizationWeight);
-    // intentionally initialize all weights to fixed values (0.2 and 0.8)
-    DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
-    matrices[0] = new DenseDoubleMatrix(5, 3, 0.2);
-    matrices[1] = new DenseDoubleMatrix(1, 6, 0.8);
-    ann.setWeightMatrices(matrices);
-    ann.setLearningStyle(LearningStyle.UNSUPERVISED);
-    
-    FeatureTransformer defaultFeatureTransformer = new DefaultFeatureTransformer();
-    ann.setFeatureTransformer(defaultFeatureTransformer);
-    
-
-    // write to file
-    String modelPath = "/tmp/testSmallLayeredNeuralNetworkReadWrite";
-    ann.setModelPath(modelPath);
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    // read from file
-    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
-    assertEquals(annCopy.getClass().getSimpleName(), annCopy.getModelType());
-    assertEquals(modelPath, annCopy.getModelPath());
-    assertEquals(learningRate, annCopy.getLearningRate(), 0.000001);
-    assertEquals(momentumWeight, annCopy.getMomemtumWeight(), 0.000001);
-    assertEquals(regularizationWeight, annCopy.getRegularizationWeight(),
-        0.000001);
-    assertEquals(TrainingMethod.GRADIENT_DESCENT, annCopy.getTrainingMethod());
-    assertEquals(LearningStyle.UNSUPERVISED, annCopy.getLearningStyle());
-
-    // compare weights
-    DoubleMatrix[] weightsMatrices = annCopy.getWeightMatrices();
-    for (int i = 0; i < weightsMatrices.length; ++i) {
-      DoubleMatrix expectMat = matrices[i];
-      DoubleMatrix actualMat = weightsMatrices[i];
-      for (int j = 0; j < expectMat.getRowCount(); ++j) {
-        for (int k = 0; k < expectMat.getColumnCount(); ++k) {
-          assertEquals(expectMat.get(j, k), actualMat.get(j, k), 0.000001);
-        }
-      }
-    }
-    
-    FeatureTransformer copyTransformer = annCopy.getFeatureTransformer();
-    assertEquals(defaultFeatureTransformer.getClass().getName(), copyTransformer.getClass().getName());
-  }
-
-  /**
-   * Test the forward functionality.
-   */
-  @Test
-  public void testOutput() {
-    // first network
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(2, false,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.addLayer(5, false,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.addLayer(1, true,
-        FunctionFactory.createDoubleFunction("IdentityFunction"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann.setLearningRate(0.1);
-    // intentionally initialize all weights to 0.5
-    DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
-    matrices[0] = new DenseDoubleMatrix(5, 3, 0.5);
-    matrices[1] = new DenseDoubleMatrix(1, 6, 0.5);
-    ann.setWeightMatrices(matrices);
-
-    double[] arr = new double[] { 0, 1 };
-    DoubleVector training = new DenseDoubleVector(arr);
-    DoubleVector result = ann.getOutput(training);
-    assertEquals(1, result.getDimension());
-    // assertEquals(3, result.get(0), 0.000001);
-
-    // second network
-    SmallLayeredNeuralNetwork ann2 = new SmallLayeredNeuralNetwork();
-    ann2.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann2.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann2.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann2.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann2.setLearningRate(0.3);
-    // intentionally initialize all weights to 0.5
-    DoubleMatrix[] matrices2 = new DenseDoubleMatrix[2];
-    matrices2[0] = new DenseDoubleMatrix(3, 3, 0.5);
-    matrices2[1] = new DenseDoubleMatrix(1, 4, 0.5);
-    ann2.setWeightMatrices(matrices2);
-
-    double[] test = { 0, 0 };
-    double[] result2 = { 0.807476 };
-
-    DoubleVector vec = ann2.getOutput(new DenseDoubleVector(test));
-    assertArrayEquals(result2, vec.toArray(), 0.000001);
-
-    SmallLayeredNeuralNetwork ann3 = new SmallLayeredNeuralNetwork();
-    ann3.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann3.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann3.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann3.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann3.setLearningRate(0.3);
-    // intentionally initialize all weights to 0.5
-    DoubleMatrix[] initMatrices = new DenseDoubleMatrix[2];
-    initMatrices[0] = new DenseDoubleMatrix(3, 3, 0.5);
-    initMatrices[1] = new DenseDoubleMatrix(1, 4, 0.5);
-    ann3.setWeightMatrices(initMatrices);
-
-    double[] instance = { 0, 1 };
-    DoubleVector output = ann3.getOutput(new DenseDoubleVector(instance));
-    assertEquals(0.8315410, output.get(0), 0.000001);
-  }
-
-  @Test
-  public void testXORlocal() {
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann.setLearningRate(0.5);
-    ann.setMomemtumWeight(0.0);
-
-    int iterations = 50000; // the number of iterations should be very large
-    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
-    for (int i = 0; i < iterations; ++i) {
-      DoubleMatrix[] matrices = null;
-      for (int j = 0; j < instances.length; ++j) {
-        matrices = ann.trainByInstance(new DenseDoubleVector(instances[j
-            % instances.length]));
-        ann.updateWeightMatrices(matrices);
-      }
-    }
-
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = ann.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-
-    // write model into file and read out
-    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocal";
-    ann.setModelPath(modelPath);
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
-    // test on instances
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = annCopy.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-  }
-
-  @Test
-  public void testXORWithMomentum() {
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann.setLearningRate(0.6);
-    ann.setMomemtumWeight(0.3);
-
-    int iterations = 2000; // the number of iterations should be very large
-    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
-    for (int i = 0; i < iterations; ++i) {
-      for (int j = 0; j < instances.length; ++j) {
-        ann.trainOnline(new DenseDoubleVector(instances[j % instances.length]));
-      }
-    }
-
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = ann.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-
-    // write model into file and read out
-    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithMomentum";
-    ann.setModelPath(modelPath);
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
-    // test on instances
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = annCopy.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-  }
-
-  @Test
-  public void testXORLocalWithRegularization() {
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-    ann.setLearningRate(0.7);
-    ann.setMomemtumWeight(0.5);
-    ann.setRegularizationWeight(0.002);
-
-    int iterations = 5000; // the number of iterations should be very large
-    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
-    for (int i = 0; i < iterations; ++i) {
-      for (int j = 0; j < instances.length; ++j) {
-        ann.trainOnline(new DenseDoubleVector(instances[j % instances.length]));
-      }
-    }
-
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = ann.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-
-    // write model into file and read out
-    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithRegularization";
-    ann.setModelPath(modelPath);
-    try {
-      ann.writeModelToFile();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
-    // test on instances
-    for (int i = 0; i < instances.length; ++i) {
-      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
-      // the expected output is the last element in array
-      double result = instances[i][2];
-      double actual = annCopy.getOutput(input).get(0);
-      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
-        Log.info("Neural network failes to lear the XOR.");
-      }
-    }
-  }
-
-  @Test
-  public void testTwoClassClassification() {
-    // use logistic regression data
-    String filepath = "src/test/resources/logistic_regression_data.txt";
-    List<double[]> instanceList = new ArrayList<double[]>();
-
-    try {
-      BufferedReader br = new BufferedReader(new FileReader(filepath));
-      String line = null;
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.trim().split(",");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
-    
-    int dimension = instanceList.get(0).length - 1;
-
-    // divide dataset into training and testing
-    List<double[]> testInstances = new ArrayList<double[]>();
-    testInstances.addAll(instanceList.subList(instanceList.size() - 100,
-        instanceList.size()));
-    List<double[]> trainingInstances = instanceList.subList(0,
-        instanceList.size() - 100);
-
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.setLearningRate(0.001);
-    ann.setMomemtumWeight(0.1);
-    ann.setRegularizationWeight(0.01);
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("CrossEntropy"));
-
-    long start = new Date().getTime();
-    int iterations = 1000;
-    for (int i = 0; i < iterations; ++i) {
-      for (double[] trainingInstance : trainingInstances) {
-        ann.trainOnline(new DenseDoubleVector(trainingInstance));
-      }
-    }
-    long end = new Date().getTime();
-    Log.info(String.format("Training time: %fs\n",
-        (double) (end - start) / 1000));
-
-    double errorRate = 0;
-    // calculate the error on test instance
-    for (double[] testInstance : testInstances) {
-      DoubleVector instance = new DenseDoubleVector(testInstance);
-      double expected = instance.get(instance.getDimension() - 1);
-      instance = instance.slice(instance.getDimension() - 1);
-      double actual = ann.getOutput(instance).get(0);
-      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
-        ++errorRate;
-      }
-    }
-    errorRate /= testInstances.size();
-
-    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
-  }
-  
-  @Test
-  public void testLogisticRegression() {
-    this.testLogisticRegressionDistributedVersion();
-    this.testLogisticRegressionDistributedVersionWithFeatureTransformer();
-  }
-
-  public void testLogisticRegressionDistributedVersion() {
-    // write data into a sequence file
-    String tmpStrDatasetPath = "/tmp/logistic_regression_data";
-    Path tmpDatasetPath = new Path(tmpStrDatasetPath);
-    String strDataPath = "src/test/resources/logistic_regression_data.txt";
-    String modelPath = "/tmp/logistic-regression-distributed-model";
-
-    Configuration conf = new Configuration();
-    List<double[]> instanceList = new ArrayList<double[]>();
-    List<double[]> trainingInstances = null;
-    List<double[]> testInstances = null;
-
-    try {
-      FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
-      fs.delete(tmpDatasetPath, true);
-      if (fs.exists(tmpDatasetPath)) {
-        fs.createNewFile(tmpDatasetPath);
-      }
-
-      BufferedReader br = new BufferedReader(new FileReader(strDataPath));
-      String line = null;
-      int count = 0;
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.trim().split(",");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-      
-      zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
-      
-      // write training data to a temporary sequence file
-      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
-          tmpDatasetPath, LongWritable.class, VectorWritable.class);
-      int testSize = 150;
-
-      Collections.shuffle(instanceList);
-      testInstances = new ArrayList<double[]>();
-      testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
-          instanceList.size()));
-      trainingInstances = instanceList.subList(0, instanceList.size()
-          - testSize);
-
-      for (double[] instance : trainingInstances) {
-        DoubleVector vec = new DenseDoubleVector(instance);
-        writer.append(new LongWritable(count++), new VectorWritable(vec));
-      }
-      writer.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-
-    // create model
-    int dimension = 8;
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.setLearningRate(0.7);
-    ann.setMomemtumWeight(0.5);
-    ann.setRegularizationWeight(0.1);
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("CrossEntropy"));
-    ann.setModelPath(modelPath);
-
-    long start = new Date().getTime();
-    Map<String, String> trainingParameters = new HashMap<String, String>();
-    trainingParameters.put("tasks", "5");
-    trainingParameters.put("training.max.iterations", "2000");
-    trainingParameters.put("training.batch.size", "300");
-    trainingParameters.put("convergence.check.interval", "1000");
-    ann.train(tmpDatasetPath, trainingParameters);
-
-    long end = new Date().getTime();
-
-    // validate results
-    double errorRate = 0;
-    // calculate the error on test instance
-    for (double[] testInstance : testInstances) {
-      DoubleVector instance = new DenseDoubleVector(testInstance);
-      double expected = instance.get(instance.getDimension() - 1);
-      instance = instance.slice(instance.getDimension() - 1);
-      double actual = ann.getOutput(instance).get(0);
-      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
-        ++errorRate;
-      }
-    }
-    errorRate /= testInstances.size();
-
-    Log.info(String.format("Training time: %fs\n",
-        (double) (end - start) / 1000));
-    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
-  }
-  
-  public void testLogisticRegressionDistributedVersionWithFeatureTransformer() {
-    // write data into a sequence file
-    String tmpStrDatasetPath = "/tmp/logistic_regression_data_feature_transformer";
-    Path tmpDatasetPath = new Path(tmpStrDatasetPath);
-    String strDataPath = "src/test/resources/logistic_regression_data.txt";
-    String modelPath = "/tmp/logistic-regression-distributed-model-feature-transformer";
-
-    Configuration conf = new Configuration();
-    List<double[]> instanceList = new ArrayList<double[]>();
-    List<double[]> trainingInstances = null;
-    List<double[]> testInstances = null;
-
-    try {
-      FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
-      if (fs.exists(tmpDatasetPath)) {
-        fs.delete(tmpDatasetPath, true);
-      }
-
-      BufferedReader br = new BufferedReader(new FileReader(strDataPath));
-      String line = null;
-      int count = 0;
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.trim().split(",");
-        double[] instance = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          instance[i] = Double.parseDouble(tokens[i]);
-        }
-        instanceList.add(instance);
-      }
-      br.close();
-      
-      zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
-      
-      // write the training data to a temporary sequence file
-      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
-          tmpDatasetPath, LongWritable.class, VectorWritable.class);
-      int testSize = 150;
-
-      Collections.shuffle(instanceList);
-      testInstances = new ArrayList<double[]>();
-      testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
-          instanceList.size()));
-      trainingInstances = instanceList.subList(0, instanceList.size()
-          - testSize);
-
-      for (double[] instance : trainingInstances) {
-        DoubleVector vec = new DenseDoubleVector(instance);
-        writer.append(new LongWritable(count++), new VectorWritable(vec));
-      }
-      writer.close();
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-
-    // create model
-    int dimension = 8;
-    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-    ann.setLearningRate(0.7);
-    ann.setMomemtumWeight(0.5);
-    ann.setRegularizationWeight(0.1);
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(dimension, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
-    ann.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("CrossEntropy"));
-    ann.setModelPath(modelPath);
-    
-    FeatureTransformer featureTransformer = new DefaultFeatureTransformer();
-    
-    ann.setFeatureTransformer(featureTransformer);
-
-    long start = new Date().getTime();
-    Map<String, String> trainingParameters = new HashMap<String, String>();
-    trainingParameters.put("tasks", "5");
-    trainingParameters.put("training.max.iterations", "2000");
-    trainingParameters.put("training.batch.size", "300");
-    trainingParameters.put("convergence.check.interval", "1000");
-    ann.train(tmpDatasetPath, trainingParameters);
-    
-
-    long end = new Date().getTime();
-
-    // validate results
-    double errorRate = 0;
-    // compute the error rate on the test instances
-    for (double[] testInstance : testInstances) {
-      DoubleVector instance = new DenseDoubleVector(testInstance);
-      double expected = instance.get(instance.getDimension() - 1);
-      instance = instance.slice(instance.getDimension() - 1);
-      instance = featureTransformer.transform(instance);
-      double actual = ann.getOutput(instance).get(0);
-      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
-        ++errorRate;
-      }
-    }
-    errorRate /= testInstances.size();
-
-    Log.info(String.format("Training time: %fs\n",
-        (double) (end - start) / 1000));
-    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
-  }
-
-}
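
Note: both tests above call zeroOneNormalization(instanceList, featureLen),
which is defined elsewhere in this test class. A minimal sketch of such a
helper, assuming it performs per-column min-max scaling of the feature
columns (the trailing label column is left untouched):

    private static void zeroOneNormalization(List<double[]> instanceList,
        int featureLen) {
      for (int col = 0; col < featureLen; ++col) {
        // find the range of this column
        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;
        for (double[] instance : instanceList) {
          min = Math.min(min, instance[col]);
          max = Math.max(max, instance[col]);
        }
        double range = max - min;
        if (range == 0) {
          continue; // constant column, nothing to scale
        }
        // map each value into [0, 1]
        for (double[] instance : instanceList) {
          instance[col] = (instance[col] - min) / range;
        }
      }
    }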

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetworkMessage.java
----------------------------------------------------------------------
diff --git a/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetworkMessage.java b/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetworkMessage.java
deleted file mode 100644
index 148be6e..0000000
--- a/ml/src/test/java/org/apache/hama/ml/ann/TestSmallLayeredNeuralNetworkMessage.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.junit.Test;
-
-/**
- * Test the functionalities of SmallLayeredNeuralNetworkMessage.
- * 
- */
-public class TestSmallLayeredNeuralNetworkMessage {
-
-  @Test
-  public void testReadWriteWithoutPrev() {
-    double error = 0.22;
-    double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
-        { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
-    double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
-    DoubleMatrix[] matrices = new DoubleMatrix[2];
-    matrices[0] = new DenseDoubleMatrix(matrix1);
-    matrices[1] = new DenseDoubleMatrix(matrix2);
-
-    boolean isConverge = false;
-
-    SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
-        error, isConverge, matrices, null);
-    Configuration conf = new Configuration();
-    String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessage";
-    Path path = new Path(strPath);
-    try {
-      FileSystem fs = FileSystem.get(new URI(strPath), conf);
-      FSDataOutputStream out = fs.create(path);
-      message.write(out);
-      out.close();
-
-      FSDataInputStream in = fs.open(path);
-      SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
-          0, isConverge, null, null);
-      readMessage.readFields(in);
-      in.close();
-      assertEquals(error, readMessage.getTrainingError(), 0.000001);
-      assertFalse(readMessage.isConverge());
-      DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
-      assertEquals(2, readMatrices.length);
-      for (int i = 0; i < readMatrices.length; ++i) {
-        double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
-            .getValues();
-        double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
-            .getValues();
-        for (int r = 0; r < doubleMatrices.length; ++r) {
-          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
-        }
-      }
-
-      DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
-      assertNull(readPrevMatrices);
-
-      // delete
-      fs.delete(path, true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Test
-  public void testReadWriteWithPrev() {
-    double error = 0.22;
-    boolean isConverge = true;
-
-    double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
-        { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
-    double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
-    DoubleMatrix[] matrices = new DoubleMatrix[2];
-    matrices[0] = new DenseDoubleMatrix(matrix1);
-    matrices[1] = new DenseDoubleMatrix(matrix2);
-
-    double[][] prevMatrix1 = new double[][] { { 0.1, 0.1, 0.2, 0.3 },
-        { 0.2, 0.4, 0.1, 0.5 }, { 0.5, 0.1, 0.5, 0.2 } };
-    double[][] prevMatrix2 = new double[][] { { 0.1, 0.2, 0.5, 0.9 },
-        { 0.3, 0.5, 0.2, 0.6 }, { 0.6, 0.8, 0.7, 0.5 } };
-
-    DoubleMatrix[] prevMatrices = new DoubleMatrix[2];
-    prevMatrices[0] = new DenseDoubleMatrix(prevMatrix1);
-    prevMatrices[1] = new DenseDoubleMatrix(prevMatrix2);
-
-    SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
-        error, isConverge, matrices, prevMatrices);
-    Configuration conf = new Configuration();
-    String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessageWithPrev";
-    Path path = new Path(strPath);
-    try {
-      FileSystem fs = FileSystem.get(new URI(strPath), conf);
-      FSDataOutputStream out = fs.create(path);
-      message.write(out);
-      out.close();
-
-      FSDataInputStream in = fs.open(path);
-      SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
-          0, isConverge, null, null);
-      readMessage.readFields(in);
-      in.close();
-
-      assertTrue(readMessage.isConverge());
-
-      DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
-      assertEquals(2, readMatrices.length);
-      for (int i = 0; i < readMatrices.length; ++i) {
-        double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
-            .getValues();
-        double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
-            .getValues();
-        for (int r = 0; r < doubleMatrices.length; ++r) {
-          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
-        }
-      }
-
-      DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
-      assertEquals(2, readPrevMatrices.length);
-      for (int i = 0; i < readPrevMatrices.length; ++i) {
-        double[][] doubleMatrices = ((DenseDoubleMatrix) readPrevMatrices[i])
-            .getValues();
-        double[][] doubleExpected = ((DenseDoubleMatrix) prevMatrices[i])
-            .getValues();
-        for (int r = 0; r < doubleMatrices.length; ++r) {
-          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
-        }
-      }
-
-      // delete
-      fs.delete(path, true);
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-  }
-
-}


[5/5] hama git commit: HAMA-961: Remove ann package

Posted by ed...@apache.org.
HAMA-961: Remove ann package


Project: http://git-wip-us.apache.org/repos/asf/hama/repo
Commit: http://git-wip-us.apache.org/repos/asf/hama/commit/3a3ea7a3
Tree: http://git-wip-us.apache.org/repos/asf/hama/tree/3a3ea7a3
Diff: http://git-wip-us.apache.org/repos/asf/hama/diff/3a3ea7a3

Branch: refs/heads/master
Commit: 3a3ea7a37743b5f3759a86460e63ae414b2e9081
Parents: 0225205
Author: Edward J. Yoon <ed...@apache.org>
Authored: Mon Nov 23 11:10:32 2015 +0900
Committer: Edward J. Yoon <ed...@apache.org>
Committed: Mon Nov 23 11:24:49 2015 +0900

----------------------------------------------------------------------
 .../apache/hama/commons/math/CrossEntropy.java  |  58 --
 .../hama/commons/math/FunctionFactory.java      |  65 --
 .../hama/commons/math/IdentityFunction.java     |  36 -
 .../org/apache/hama/commons/math/Sigmoid.java   |  39 --
 .../apache/hama/commons/math/SquaredError.java  |  46 --
 .../java/org/apache/hama/commons/math/Tanh.java |  36 -
 .../hama/commons/math/TestFunctionFactory.java  |  82 ---
 .../org/apache/hama/bsp/TestPartitioning.java   |   2 +-
 .../org/apache/hama/examples/ExampleDriver.java |   2 -
 .../org/apache/hama/examples/NeuralNetwork.java | 216 ------
 .../apache/hama/examples/NeuralNetworkTest.java | 140 ----
 .../neuralnets_classification_label.txt         |   1 -
 .../neuralnets_classification_test.txt          |   1 -
 .../neuralnets_classification_training.txt      | 668 -------------------
 .../ml/ann/AbstractLayeredNeuralNetwork.java    | 261 --------
 .../org/apache/hama/ml/ann/AutoEncoder.java     | 197 ------
 .../org/apache/hama/ml/ann/NeuralNetwork.java   | 271 --------
 .../hama/ml/ann/NeuralNetworkTrainer.java       | 107 ---
 .../hama/ml/ann/SmallLayeredNeuralNetwork.java  | 567 ----------------
 .../ann/SmallLayeredNeuralNetworkMessage.java   | 126 ----
 .../ann/SmallLayeredNeuralNetworkTrainer.java   | 244 -------
 .../apache/hama/ml/perception/MLPMessage.java   |  45 --
 .../ml/perception/MultiLayerPerceptron.java     | 203 ------
 .../hama/ml/perception/PerceptronTrainer.java   |  96 ---
 .../hama/ml/perception/SmallMLPMessage.java     | 133 ----
 .../hama/ml/perception/SmallMLPTrainer.java     | 327 ---------
 .../perception/SmallMultiLayerPerceptron.java   | 574 ----------------
 .../hama/ml/regression/LinearRegression.java    | 188 ------
 .../hama/ml/regression/LogisticRegression.java  | 180 -----
 .../org/apache/hama/ml/ann/TestAutoEncoder.java | 195 ------
 .../ml/ann/TestSmallLayeredNeuralNetwork.java   | 643 ------------------
 .../TestSmallLayeredNeuralNetworkMessage.java   | 172 -----
 .../hama/ml/perception/TestSmallMLPMessage.java | 147 ----
 .../TestSmallMultiLayerPerceptron.java          | 524 ---------------
 .../ml/regression/TestLinearRegression.java     | 133 ----
 .../ml/regression/TestLogisticRegression.java   | 130 ----
 36 files changed, 1 insertion(+), 6854 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/CrossEntropy.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/CrossEntropy.java b/commons/src/main/java/org/apache/hama/commons/math/CrossEntropy.java
deleted file mode 100644
index 1378fc4..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/CrossEntropy.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * The cross entropy cost function.
- * 
- * <pre>
- * cost(t, y) = - t * log(y) - (1 - t) * log(1 - y),
- * where t denotes the target value, y denotes the estimated value.
- * </pre>
- */
-public class CrossEntropy extends DoubleDoubleFunction {
-
-  @Override
-  public double apply(double target, double actual) {
-    // clamp away from 0 and 1 to keep the logarithms finite
-    double adjustedTarget = (target == 0 ? 0.000001 : target);
-    adjustedTarget = (adjustedTarget == 1.0 ? 0.999999 : adjustedTarget);
-    double adjustedActual = (actual == 0 ? 0.000001 : actual);
-    adjustedActual = (adjustedActual == 1 ? 0.999999 : adjustedActual);
-    return -adjustedTarget * Math.log(adjustedActual) - (1 - adjustedTarget)
-        * Math.log(1 - adjustedActual);
-  }
-
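-  /**
-   * Derivative with respect to the estimate y:
-   * d/dy [ -t * log(y) - (1 - t) * log(1 - y) ] = -t / y + (1 - t) / (1 - y).
-   */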
-  @Override
-  public double applyDerivative(double target, double actual) {
-    double adjustedTarget = target;
-    double adjustedActual = actual;
-    if (adjustedActual == 1) {
-      adjustedActual = 0.999;
-    } else if (adjustedActual == 0) {
-      adjustedActual = 0.001;
-    }
-    if (adjustedTarget == 1) {
-      adjustedTarget = 0.999;
-    } else if (adjustedTarget == 0) {
-      adjustedTarget = 0.001;
-    }
-    return -adjustedTarget / adjustedActual + (1 - adjustedTarget)
-        / (1 - adjustedActual);
-  }
-
-}
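
For concreteness, with target t = 0.5 and estimate y = 0.8 (the values used
in TestFunctionFactory below):

    cost(0.5, 0.8) = -0.5 * log(0.8) - 0.5 * log(0.2)
                   ~= 0.111572 + 0.804719
                   ~= 0.916291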

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/FunctionFactory.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/FunctionFactory.java b/commons/src/main/java/org/apache/hama/commons/math/FunctionFactory.java
deleted file mode 100644
index 15c48be..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/FunctionFactory.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * Factory to create the functions.
- * 
- */
-public class FunctionFactory {
-
-  /**
-   * Create a double function with specified name.
-   * 
-   * @param functionName
-   * @return an appropriate double function.
-   */
-  public static DoubleFunction createDoubleFunction(String functionName) {
-    if (functionName.equalsIgnoreCase(Sigmoid.class.getSimpleName())) {
-      return new Sigmoid();
-    } else if (functionName.equalsIgnoreCase(Tanh.class.getSimpleName())) {
-      return new Tanh();
-    } else if (functionName.equalsIgnoreCase(IdentityFunction.class
-        .getSimpleName())) {
-      return new IdentityFunction();
-    }
-
-    throw new IllegalArgumentException(String.format(
-        "No double function with name '%s' exists.", functionName));
-  }
-
-  /**
-   * Create a double double function with specified name.
-   * 
-   * @param functionName
-   * @return an appropriate double double function.
-   */
-  public static DoubleDoubleFunction createDoubleDoubleFunction(
-      String functionName) {
-    if (functionName.equalsIgnoreCase(SquaredError.class.getSimpleName())) {
-      return new SquaredError();
-    } else if (functionName
-        .equalsIgnoreCase(CrossEntropy.class.getSimpleName())) {
-      return new CrossEntropy();
-    }
-
-    throw new IllegalArgumentException(String.format(
-        "No double double function with name '%s' exists.", functionName));
-  }
-
-}
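
For reference, a typical use of the factory (a sketch based only on the
methods above):

    DoubleFunction sigmoid = FunctionFactory.createDoubleFunction("Sigmoid");
    double activated = sigmoid.apply(0.8); // ~0.689974
    DoubleDoubleFunction cost =
        FunctionFactory.createDoubleDoubleFunction("CrossEntropy");
    double error = cost.apply(0.5, activated);

Names are matched case-insensitively against the simple class names; an
unknown name raises IllegalArgumentException.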

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/IdentityFunction.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/IdentityFunction.java b/commons/src/main/java/org/apache/hama/commons/math/IdentityFunction.java
deleted file mode 100644
index 6b60aad..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/IdentityFunction.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * The identity function f(x) = x.
- * 
- */
-public class IdentityFunction extends DoubleFunction {
-
-  @Override
-  public double apply(double value) {
-    return value;
-  }
-
-  @Override
-  public double applyDerivative(double value) {
-    return 1;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/Sigmoid.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/Sigmoid.java b/commons/src/main/java/org/apache/hama/commons/math/Sigmoid.java
deleted file mode 100644
index eb3e9c6..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/Sigmoid.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * The Sigmoid function
- * 
- * <pre>
- * f(x) = 1 / (1 + e^{-x})
- * </pre>
- */
-public class Sigmoid extends DoubleFunction {
-
-  @Override
-  public double apply(double value) {
-    return 1.0 / (1 + Math.exp(-value));
-  }
-
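-  /**
-   * Note: the argument is assumed to be the already-activated output f(x),
-   * since f'(x) = f(x) * (1 - f(x)).
-   */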
-  @Override
-  public double applyDerivative(double value) {
-    return value * (1 - value);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/SquaredError.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/SquaredError.java b/commons/src/main/java/org/apache/hama/commons/math/SquaredError.java
deleted file mode 100644
index 42ff81b..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/SquaredError.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * The squared error cost function.
- * 
- * <pre>
- * cost(t, y) = 0.5 * (t - y) &circ; 2
- * </pre>
- */
-public class SquaredError extends DoubleDoubleFunction {
-
-  @Override
-  /**
-   * {@inheritDoc}
-   */
-  public double apply(double target, double actual) {
-    double diff = target - actual;
-    return 0.5 * diff * diff;
-  }
-
-  @Override
-  /**
-   * {@inheritDoc}
-   */
-  public double applyDerivative(double target, double actual) {
-    return actual - target;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/main/java/org/apache/hama/commons/math/Tanh.java
----------------------------------------------------------------------
diff --git a/commons/src/main/java/org/apache/hama/commons/math/Tanh.java b/commons/src/main/java/org/apache/hama/commons/math/Tanh.java
deleted file mode 100644
index c1ef6cb..0000000
--- a/commons/src/main/java/org/apache/hama/commons/math/Tanh.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-/**
- * Tanh function.
- * 
- */
-public class Tanh extends DoubleFunction {
-
-  @Override
-  public double apply(double value) {
-    return Math.tanh(value);
-  }
-
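-  /**
-   * Note: the argument is assumed to be the already-activated output f(x),
-   * since f'(x) = 1 - f(x)^2 for f(x) = tanh(x).
-   */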
-  @Override
-  public double applyDerivative(double value) {
-    return 1 - value * value;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/commons/src/test/java/org/apache/hama/commons/math/TestFunctionFactory.java
----------------------------------------------------------------------
diff --git a/commons/src/test/java/org/apache/hama/commons/math/TestFunctionFactory.java b/commons/src/test/java/org/apache/hama/commons/math/TestFunctionFactory.java
deleted file mode 100644
index 43a4bcf..0000000
--- a/commons/src/test/java/org/apache/hama/commons/math/TestFunctionFactory.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.commons.math;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.Random;
-
-import org.junit.Test;
-
-/**
- * Test case for {@link FunctionFactory}
- * 
- */
-public class TestFunctionFactory {
-
-  @Test
-  public void testCreateDoubleFunction() {
-    double input = 0.8;
-    
-    String sigmoidName = "Sigmoid";
-    DoubleFunction sigmoidFunction = FunctionFactory
-        .createDoubleFunction(sigmoidName);
-    assertEquals(sigmoidName, sigmoidFunction.getFunctionName());
-    
-    double sigmoidExpected = 0.68997448;
-    assertEquals(sigmoidExpected, sigmoidFunction.apply(input), 0.000001);
-    
-    
-    String tanhName = "Tanh";
-    DoubleFunction tanhFunction = FunctionFactory.createDoubleFunction(tanhName);
-    assertEquals(tanhName, tanhFunction.getFunctionName());
-    
-    double tanhExpected = 0.66403677;
-    assertEquals(tanhExpected, tanhFunction.apply(input), 0.00001);
-    
-    
-    String identityFunctionName = "IdentityFunction";
-    DoubleFunction identityFunction = FunctionFactory.createDoubleFunction(identityFunctionName);
-    
-    Random rnd = new Random();
-    double identityExpected = rnd.nextDouble();
-    assertEquals(identityExpected, identityFunction.apply(identityExpected), 0.000001);
-  }
-  
-  @Test
-  public void testCreateDoubleDoubleFunction() {
-    double target = 0.5;
-    double output = 0.8;
-    
-    String squaredErrorName = "SquaredError";
-    DoubleDoubleFunction squaredErrorFunction = FunctionFactory.createDoubleDoubleFunction(squaredErrorName);
-    assertEquals(squaredErrorName, squaredErrorFunction.getFunctionName());
-    
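-    // cost(0.5, 0.8) = 0.5 * (0.5 - 0.8)^2 = 0.045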
-    double squaredErrorExpected = 0.045;
-    
-    assertEquals(squaredErrorExpected, squaredErrorFunction.apply(target, output), 0.000001);
-    
-    String crossEntropyName = "CrossEntropy";
-    DoubleDoubleFunction crossEntropyFunction = FunctionFactory.createDoubleDoubleFunction(crossEntropyName);
-    assertEquals(crossEntropyName, crossEntropyFunction.getFunctionName());
-    
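-    // cost(0.5, 0.8) = -0.5 * ln(0.8) - 0.5 * ln(0.2) ~= 0.916291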
-    double crossEntropyExpected = 0.91629;
-    assertEquals(crossEntropyExpected, crossEntropyFunction.apply(target, output), 0.000001);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/core/src/test/java/org/apache/hama/bsp/TestPartitioning.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/hama/bsp/TestPartitioning.java b/core/src/test/java/org/apache/hama/bsp/TestPartitioning.java
index 00c63fd..674c42a 100644
--- a/core/src/test/java/org/apache/hama/bsp/TestPartitioning.java
+++ b/core/src/test/java/org/apache/hama/bsp/TestPartitioning.java
@@ -55,7 +55,7 @@ public class TestPartitioning extends HamaCluster {
     configuration.set("bsp.local.dir", "/tmp/hama-test");
     configuration.set(Constants.ZOOKEEPER_QUORUM, "localhost");
     configuration.setInt(Constants.ZOOKEEPER_CLIENT_PORT, 21810);
-    configuration.set("hama.sync.client.class",
+    configuration.set("hama.sync.peer.class",
         org.apache.hama.bsp.sync.ZooKeeperSyncClientImpl.class
             .getCanonicalName());
   }

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/main/java/org/apache/hama/examples/ExampleDriver.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/hama/examples/ExampleDriver.java b/examples/src/main/java/org/apache/hama/examples/ExampleDriver.java
index 08559e6..89d289d 100644
--- a/examples/src/main/java/org/apache/hama/examples/ExampleDriver.java
+++ b/examples/src/main/java/org/apache/hama/examples/ExampleDriver.java
@@ -39,8 +39,6 @@ public class ExampleDriver {
       pgd.addClass("semi", SemiClusterJobDriver.class, "Semi Clustering");
       pgd.addClass("kmeans", Kmeans.class, "K-Means Clustering");
       pgd.addClass("gd", GradientDescentExample.class, "Gradient Descent");
-      pgd.addClass("neuralnets", NeuralNetwork.class,
-          "Neural Network classification");
       pgd.addClass("kcore", KCore.class, "kcore");
       pgd.addClass("gen", Generator.class, "Random Data Generator Util");
       pgd.driver(args);

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java b/examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java
deleted file mode 100644
index ef029a6..0000000
--- a/examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.examples;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.InputStreamReader;
-import java.io.OutputStreamWriter;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.ann.SmallLayeredNeuralNetwork;
-
-/**
- * The example of using {@link SmallLayeredNeuralNetwork}, including the
- * training phase and labeling phase.
- */
-public class NeuralNetwork {
-
-  public static void main(String[] args) throws Exception {
-    if (args.length < 3) {
-      printUsage();
-      return;
-    }
-    String mode = args[0];
-    if (mode.equalsIgnoreCase("label")) {
-      if (args.length < 4) {
-        printUsage();
-        return;
-      }
-      HamaConfiguration conf = new HamaConfiguration();
-
-      String featureDataPath = args[1];
-      String resultDataPath = args[2];
-      String modelPath = args[3];
-
-      SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork(modelPath);
-
-      // process the data in a streaming fashion
-      FileSystem fs = FileSystem.get(new URI(featureDataPath), conf);
-      BufferedReader br = new BufferedReader(new InputStreamReader(
-          fs.open(new Path(featureDataPath))));
-      Path outputPath = new Path(resultDataPath);
-      if (fs.exists(outputPath)) {
-        fs.delete(outputPath, true);
-      }
-      BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
-          fs.create(outputPath)));
-
-      String line = null;
-
-      while ((line = br.readLine()) != null) {
-        if (line.trim().length() == 0) {
-          continue;
-        }
-        String[] tokens = line.trim().split(",");
-        double[] vals = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          vals[i] = Double.parseDouble(tokens[i]);
-        }
-        DoubleVector instance = new DenseDoubleVector(vals);
-        DoubleVector result = ann.getOutput(instance);
-        double[] arrResult = result.toArray();
-        StringBuilder sb = new StringBuilder();
-        for (int i = 0; i < arrResult.length; ++i) {
-          sb.append(arrResult[i]);
-          if (i != arrResult.length - 1) {
-            sb.append(",");
-          } else {
-            sb.append("\n");
-          }
-        }
-        bw.write(sb.toString());
-      }
-
-      br.close();
-      bw.close();
-    } else if (mode.equals("train")) {
-      if (args.length < 5) {
-        printUsage();
-        return;
-      }
-
-      String trainingDataPath = args[1];
-      String trainedModelPath = args[2];
-
-      int featureDimension = Integer.parseInt(args[3]);
-      int labelDimension = Integer.parseInt(args[4]);
-
-      int iteration = 1000;
-      double learningRate = 0.4;
-      double momemtumWeight = 0.2;
-      double regularizationWeight = 0.01;
-
-      // parse parameters
-      if (args.length >= 6) {
-        try {
-          iteration = Integer.parseInt(args[5]);
-          System.out.printf("Iteration: %d\n", iteration);
-        } catch (NumberFormatException e) {
-          System.err
-              .println("MAX_ITERATION format invalid. It should be a positive number.");
-          return;
-        }
-      }
-      if (args.length >= 7) {
-        try {
-          learningRate = Double.parseDouble(args[6]);
-          System.out.printf("Learning rate: %f\n", learningRate);
-        } catch (NumberFormatException e) {
-          System.err
-              .println("LEARNING_RATE format invalid. It should be a positive double in range (0, 1.0)");
-          return;
-        }
-      }
-      if (args.length >= 8) {
-        try {
-          momemtumWeight = Double.parseDouble(args[7]);
-          System.out.printf("Momemtum weight: %f\n", momemtumWeight);
-        } catch (NumberFormatException e) {
-          System.err
-              .println("MOMEMTUM_WEIGHT format invalid. It should be a positive double in range (0, 1.0)");
-          return;
-        }
-      }
-      if (args.length >= 9) {
-        try {
-          regularizationWeight = Double.parseDouble(args[8]);
-          System.out
-              .printf("Regularization weight: %f\n", regularizationWeight);
-        } catch (NumberFormatException e) {
-          System.err
-              .println("REGULARIZATION_WEIGHT format invalid. It should be a positive double in range (0, 1.0)");
-          return;
-        }
-      }
-
-      // train the model
-      SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
-      ann.setLearningRate(learningRate);
-      ann.setMomemtumWeight(momemtumWeight);
-      ann.setRegularizationWeight(regularizationWeight);
-      ann.addLayer(featureDimension, false,
-          FunctionFactory.createDoubleFunction("Sigmoid"));
-      ann.addLayer(featureDimension, false,
-          FunctionFactory.createDoubleFunction("Sigmoid"));
-      ann.addLayer(labelDimension, true,
-          FunctionFactory.createDoubleFunction("Sigmoid"));
-      ann.setCostFunction(FunctionFactory
-          .createDoubleDoubleFunction("CrossEntropy"));
-      ann.setModelPath(trainedModelPath);
-
-      Map<String, String> trainingParameters = new HashMap<String, String>();
-      trainingParameters.put("tasks", "5");
-      trainingParameters.put("training.max.iterations", "" + iteration);
-      trainingParameters.put("training.batch.size", "300");
-      trainingParameters.put("convergence.check.interval", "1000");
-      ann.train(new Path(trainingDataPath), trainingParameters);
-    }
-
-  }
-
-  private static void printUsage() {
-    System.out
-        .println("USAGE: <MODE> <INPUT_PATH> <OUTPUT_PATH> <MODEL_PATH>|<FEATURE_DIMENSION> <LABEL_DIMENSION> [<MAX_ITERATION> <LEARNING_RATE> <MOMEMTUM_WEIGHT> <REGULARIZATION_WEIGHT>]");
-    System.out
-        .println("\tMODE\t- train: train the model with given training data.");
-    System.out
-        .println("\t\t- label: obtain the result by feeding the features to the neural network.");
-    System.out
-        .println("\tINPUT_PATH\tin 'train' mode, it is the path of the training data; in 'label' mode, it is the path of the data to be labeled (features without the label column).");
-    System.out
-        .println("\tOUTPUT_PATH\tin 'train' mode, it is where the trained model is stored; in 'label' mode, it is where the labeled data is stored.");
-    System.out.println("\n\tConditional Parameters:");
-    System.out
-        .println("\tMODEL_PATH\tonly required in 'label' mode. It specifies where to load the trained neural network model.");
-    System.out
-        .println("\tMAX_ITERATION\tonly used in 'train' mode. It specifies how many iterations for the neural network to run. Default is 0.01.");
-    System.out
-        .println("\tLEARNING_RATE\tonly used to 'train' mode. It specifies the degree of aggregation for learning, usually in range (0, 1.0). Default is 0.1.");
-    System.out
-        .println("\tMOMEMTUM_WEIGHT\tonly used to 'train' mode. It specifies the weight of momemtum. Default is 0.");
-    System.out
-        .println("\tREGULARIZATION_WEIGHT\tonly required in 'train' model. It specifies the weight of reqularization.");
-    System.out.println("\nExample:");
-    System.out
-        .println("Train a neural network with with feature dimension 8, label dimension 1 and default setting:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1");
-    System.out
-        .println("Train a neural network with with feature dimension 8, label dimension 1 and specify learning rate as 0.1, momemtum rate as 0.2, and regularization weight as 0.01:\n\tneuralnets.train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1 0.1 0.2 0.01");
-    System.out
-        .println("Label the data with trained model:\n\tneuralnets evaluate hdfs://localhost:30002/unlabeled_data hdfs://localhost:30002/result hdfs://localhost:30002/model");
-  }
-
-}
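
Note: in 'train' mode the INPUT_PATH must point to a SequenceFile of
(LongWritable, VectorWritable) records, each vector holding the features
followed by the label; NeuralNetworkTest below shows the conversion from a
CSV text file into this format.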

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java
----------------------------------------------------------------------
diff --git a/examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java b/examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java
deleted file mode 100644
index 6b4798d..0000000
--- a/examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.examples;
-
-import java.io.BufferedReader;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleVector;
-
-/**
- * Test the functionality of NeuralNetwork Example.
- * 
- */
-public class NeuralNetworkTest extends TestCase {
-  private Configuration conf = new HamaConfiguration();
-  private FileSystem fs;
-  private String MODEL_PATH = "/tmp/neuralnets.model";
-  private String RESULT_PATH = "/tmp/neuralnets.txt";
-  private String SEQTRAIN_DATA = "/tmp/test-neuralnets.data";
-  
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
-    fs = FileSystem.get(conf);
-  }
-
-  public void testNeuralnetsLabeling() throws IOException {
-    this.neuralNetworkTraining();
-
-    String dataPath = "src/test/resources/neuralnets_classification_test.txt";
-    String mode = "label";
-    try {
-      NeuralNetwork
-          .main(new String[] { mode, dataPath, RESULT_PATH, MODEL_PATH });
-
-      // compare results with ground-truth
-      BufferedReader groundTruthReader = new BufferedReader(new FileReader(
-          "src/test/resources/neuralnets_classification_label.txt"));
-      List<Double> groundTruthList = new ArrayList<Double>();
-      String line = null;
-      while ((line = groundTruthReader.readLine()) != null) {
-        groundTruthList.add(Double.parseDouble(line));
-      }
-      groundTruthReader.close();
-
-      BufferedReader resultReader = new BufferedReader(new FileReader(
-          RESULT_PATH));
-      List<Double> resultList = new ArrayList<Double>();
-      while ((line = resultReader.readLine()) != null) {
-        resultList.add(Double.parseDouble(line));
-      }
-      resultReader.close();
-      int total = resultList.size();
-      double correct = 0;
-      for (int i = 0; i < groundTruthList.size(); ++i) {
-        double actual = resultList.get(i);
-        double expected = groundTruthList.get(i);
-        if (actual < 0.5 && expected < 0.5 || actual >= 0.5 && expected >= 0.5) {
-          ++correct;
-        }
-      }
-      System.out.printf("Precision: %f\n", correct / total);
-
-    } catch (Exception e) {
-      e.printStackTrace();
-    } finally {
-      fs.delete(new Path(RESULT_PATH), true);
-      fs.delete(new Path(MODEL_PATH), true);
-      fs.delete(new Path(SEQTRAIN_DATA), true);
-    }
-  }
-
-  private void neuralNetworkTraining() {
-    String mode = "train";
-    String strTrainingDataPath = "src/test/resources/neuralnets_classification_training.txt";
-    int featureDimension = 8;
-    int labelDimension = 1;
-
-    Path sequenceTrainingDataPath = new Path(SEQTRAIN_DATA);
-    Configuration conf = new Configuration();
-    FileSystem fs;
-    try {
-      fs = FileSystem.get(conf);
-      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
-          sequenceTrainingDataPath, LongWritable.class, VectorWritable.class);
-      BufferedReader br = new BufferedReader(
-          new FileReader(strTrainingDataPath));
-      String line = null;
-      // convert the data in sequence file format
-      while ((line = br.readLine()) != null) {
-        String[] tokens = line.split(",");
-        double[] vals = new double[tokens.length];
-        for (int i = 0; i < tokens.length; ++i) {
-          vals[i] = Double.parseDouble(tokens[i]);
-        }
-        writer.append(new LongWritable(), new VectorWritable(
-            new DenseDoubleVector(vals)));
-      }
-      writer.close();
-      br.close();
-    } catch (IOException e1) {
-      e1.printStackTrace();
-    }
-
-    try {
-      NeuralNetwork.main(new String[] { mode, SEQTRAIN_DATA,
-          MODEL_PATH, "" + featureDimension, "" + labelDimension });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/test/resources/neuralnets_classification_label.txt
----------------------------------------------------------------------
diff --git a/examples/src/test/resources/neuralnets_classification_label.txt b/examples/src/test/resources/neuralnets_classification_label.txt
deleted file mode 100644
index e1b6789..0000000
--- a/examples/src/test/resources/neuralnets_classification_label.txt
+++ /dev/null
@@ -1 +0,0 @@
-1
0
0
0
0
0
0
0
1
1
0
1
0
0
1
0
1
0
0
0
0
0
1
0
1
0
1
0
1
1
0
0
0
0
1
1
0
0
0
1
0
1
1
0
0
1
0
0
1
1
0
0
1
0
0
1
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
0
1
1
0
0
1
0
0
1
0
1
1
1
0
0
1
1
1
0
1
0
1
0
1
0
0
0
0
1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/test/resources/neuralnets_classification_test.txt
----------------------------------------------------------------------
diff --git a/examples/src/test/resources/neuralnets_classification_test.txt b/examples/src/test/resources/neuralnets_classification_test.txt
deleted file mode 100644
index b19107d..0000000
--- a/examples/src/test/resources/neuralnets_classification_test.txt
+++ /dev/null
@@ -1 +0,0 @@
-0.588235294,0.557788945,0.573770492,0.272727273,0,0.409836066,0.026900085,0.316666667
0.352941176,0.492462312,0.475409836,0.333333333,0.224586288,0.506706408,0.15029889,0.366666667
0.529411765,0.773869347,0.639344262,0.303030303,0.11820331,0.460506706,0.036720751,0.4
0.352941176,0.829145729,0.557377049,0.262626263,0.19858156,0.500745156,0.236122972,0.466666667
0.058823529,0.497487437,0.475409836,0.101010101,0,0.378539493,0.201964133,0
0.588235294,0.341708543,0.868852459,0.232323232,0.057919622,0.529061103,0.088385995,0.433333333
0.176470588,0.618090452,0.819672131,0.353535354,0.283687943,0.853949329,0.342442357,0.016666667
0.470588235,0.457286432,0.672131148,0,0,0.530551416,0.217335611,0.783333333
0.352941176,0.979899497,0.573770492,0,0,0.460506706,0.106746371,0.166666667
0.529411765,0.783919598,0.704918033,0,0,0.369597615,0.064901793,0.533333333
0,0.467336683,0.491803279,0,0,0.526080477,0.078992314,0.066666667
0.176470588,0.608040201,0.426229508,0,0,0.536512668,0.020922289,0.066666667
0.117647059,0.507537688,0.475409836,0.171717172,0.313238771,0.360655738,0.228864219,0.033333333
0.117647059,0.281407035,0.459016393,0.282828283,0.053191489,0.360655738,0.108454313,0.016666667
0,0.814070352,0.62295082,0.363636364,0,0.739195231,0.122117848,0.083333333
0,0.477386935,0.524590164,0.393939394,0.124113475,0.664679583,0.122971819,0.016666667
0.235294118,0.628140704,0.655737705,0,0,0.481371088,0.195559351,0.1
0.294117647,0.683417085,0.672131148,0,0,0,0.239965841,0.8
0.117647059,0.648241206,0.606557377,0.262626263,0.242316785,0.494783905,0.219043553,0.066666667
0.176470588,0.653266332,0.524590164,0,0,0.344262295,0.100768574,0.016666667
0.058823529,0.537688442,0.409836066,0.191919192,0,0.421758569,0.043979505,0.133333333
0.058823529,0.703517588,0.606557377,0.262626263,0.212765957,0.359165425,0.320239112,0.033333333
0.058823529,0.72361809,0.672131148,0.464646465,0.212765957,0.687034277,0.109735269,0.416666667
0.470588235,0.537688442,0.655737705,0,0,0.36661699,0.332194705,0.216666667
0.764705882,0.793969849,0.93442623,0,0,0.630402385,0.076430401,0.383333333
0.117647059,0.608040201,0.573770492,0.323232323,0.112293144,0.58271237,0.34500427,0.033333333
0.411764706,0.648241206,0.557377049,0.494949495,0.147754137,0.573770492,0.154141759,0.366666667
0.117647059,0.452261307,0.491803279,0,0,0.350223547,0.04824936,0.066666667
0.411764706,0.713567839,0.737704918,0.242424242,0.567375887,0.453055142,0.021349274,0.366666667
0.176470588,0.849246231,0.606557377,0.191919192,0.147754137,0.445603577,0.081127242,0.166666667
0,0.497487437,0,0,0,0.372578241,0.074722459,0.016666667
0.235294118,0.638190955,0.721311475,0.111111111,0.18321513,0.514157973,0.222032451,0.116666667
0.235294118,0.592964824,0.573770492,0,0,0.66318927,0.352690009,0.083333333
0.117647059,0.613065327,0.62295082,0.272727273,0.236406619,0.535022355,0.17292912,0.083333333
0.352941176,0.628140704,0.639344262,0.313131313,0,0.411326379,0.20794193,0.466666667
0.058823529,0.844221106,0.721311475,0.292929293,0,0.521609538,0.353116994,0.516666667
0.117647059,0.648241206,0,0,0,0.573770492,0.096498719,0.333333333
0.235294118,0.552763819,0.62295082,0.202020202,0.11820331,0.423248882,0.017079419,0.1
0.352941176,0.40201005,0.655737705,0.363636364,0,0.59314456,0.042271563,0.116666667
0.588235294,0.577889447,0,0,0,0,0.078138343,0.15
0.117647059,0.638190955,0.37704918,0.212121212,0.395981087,0.51266766,0.041844577,0.016666667
0.529411765,0.824120603,0.639344262,0,0,0.488822653,0.029888984,0.4
0.117647059,0.467336683,0.524590164,0.323232323,0.189125296,0.566318927,0.254483348,0.033333333
0.176470588,0.793969849,0.524590164,0.131313131,0.457446809,0.464977645,0.09265585,0.05
0.294117647,0.633165829,0.639344262,0.272727273,0.026004728,0.441132638,0.154141759,0.316666667
0.588235294,0.648241206,0.508196721,0.363636364,0,0.614008942,0.15499573,0.283333333
0,0.673366834,0.475409836,0.202020202,0.343971631,0.393442623,0.116994022,0
0.176470588,0.512562814,0.606557377,0,0,0.439642325,0.018360376,0.183333333
0.411764706,0.939698492,0.409836066,0.333333333,0.463356974,0.505216095,0.319385141,0.216666667
0.176470588,0.869346734,0.639344262,0.393939394,0.218676123,0.503725782,0.38087105,0.166666667
0.588235294,0.472361809,0.590163934,0.181818182,0,0.344262295,0.220751494,0.583333333
0.058823529,0.542713568,0.491803279,0.464646465,0.210401891,0.529061103,0.143894108,0.05
0.294117647,0.487437186,0.62295082,0.272727273,0,0.530551416,0.128095645,0.516666667
0.235294118,0.417085427,0.704918033,0.191919192,0,0.436661699,0.10204953,0.216666667
0.058823529,0.572864322,0.540983607,0.363636364,0.236406619,0.56780924,0.090093937,0
0.058823529,0.748743719,0.557377049,0.292929293,0.150118203,0.436661699,0.115713066,0.35
0.294117647,0.587939698,0.704918033,0.303030303,0.124113475,0.58271237,0.073868488,0.35
0.058823529,0.557788945,0.770491803,0,0,0.488822653,0.079846285,0.4
0.235294118,0.56281407,0.639344262,0.404040404,0,0.587183308,0.067463706,0.283333333
0.058823529,0.582914573,0.639344262,0.292929293,0.212765957,0.538002981,0.178479932,0.066666667
0,0.708542714,0.68852459,0.262626263,0,0.482861401,0.151579846,0.016666667
0.117647059,0.879396985,0.721311475,0,0,0.341281669,0.1058924,0.016666667
0.117647059,0.462311558,0.426229508,0,0,0.448584203,0.026900085,0.016666667
0.176470588,0.653266332,0.639344262,0.232323232,0.093380615,0.423248882,0.104611443,0.216666667
0.470588235,0.603015075,0.704918033,0,0,0.423248882,0.077284372,0.016666667
0.117647059,0.874371859,0.721311475,0.373737374,0.141843972,0.66318927,0.242527754,0.05
0.117647059,0.532663317,0.459016393,0.272727273,0.195035461,0.43219076,0.148590948,0.016666667
0.117647059,0.527638191,0.614754098,0,0,0.347242921,0.205807003,0.533333333
0.235294118,0.477386935,0.491803279,0.323232323,0,0.52757079,0.087959009,0.116666667
0,0.633165829,0.704918033,0.272727273,0.141843972,0.408345753,0.186592656,0
0.470588235,0.326633166,0.590163934,0.232323232,0,0.476900149,0.222886422,0.35
0.117647059,0.497487437,0.491803279,0.171717172,0.189125296,0.545454545,0.160119556,0
0.058823529,0.512562814,0.606557377,0,0,0.588673621,0.091801879,0.35
0.647058824,0.603015075,0.655737705,0.373737374,0.177304965,0.630402385,0.301878736,0.45
0.176470588,0.512562814,0.360655738,0.202020202,0.111111111,0.459016393,0.137489325,0.083333333
0.058823529,0.547738693,0.475409836,0.181818182,0.137115839,0.424739195,0.060204953,0.016666667
0.529411765,0.703517588,0.770491803,0,0,0.48733234,0.280102477,0.4
0.764705882,0.768844221,0.721311475,0.373737374,0.165484634,0.605067064,0.467976089,0.3
0.705882353,0.502512563,0.68852459,0.333333333,0.124113475,0.44709389,0.175064048,0.416666667
0.058823529,0.738693467,0.770491803,0.414141414,0,0.734724292,0.119555935,0.1
0.058823529,0.407035176,0.606557377,0.414141414,0.067375887,0.690014903,0.434671221,0.183333333
0.176470588,0.939698492,0.573770492,0.222222222,0.236406619,0.54247392,0.140905209,0.25
0.352941176,0.814070352,0.508196721,0,0,0.362146051,0.042698548,0.483333333
0.235294118,0.683417085,0.573770492,0,0,0.464977645,0.471391973,0.016666667
0.058823529,0.608040201,0.639344262,0.393939394,0.087470449,0.581222057,0.078138343,0.116666667
0.176470588,0.542713568,0.508196721,0.242424242,0,0.387481371,0.061912895,0.066666667
0,0.909547739,0.721311475,0.444444444,0.602836879,0.645305514,0.061485909,0.083333333
0.470588235,0.773869347,0.639344262,0.323232323,0,0.482861401,0.155849701,0.4
0.058823529,0.64321608,0.721311475,0.393939394,0.130023641,0.543964232,0.418018787,0.266666667
0.411764706,0.688442211,0.737704918,0.414141414,0,0.476900149,0.133646456,0.3
0,0.618090452,0.590163934,0,0,0.540983607,0.076857387,0.516666667
0.058823529,0.532663317,0.62295082,0,0,0.558867362,0.050811272,0.083333333
0.352941176,0.954773869,0.754098361,0,0,0.529061103,0.085397096,0.75
0.117647059,0.442211055,0.475409836,0.262626263,0.01891253,0.423248882,0.293766012,0.016666667
0.529411765,0.854271357,0.606557377,0.313131313,0,0.655737705,0.138770282,0.366666667
0.529411765,0.447236181,0.508196721,0,0,0.335320417,0.027327071,0.2
0.588235294,0.507537688,0.62295082,0.484848485,0.212765957,0.490312966,0.03970965,0.7
0.117647059,0.613065327,0.573770492,0.272727273,0,0.548435171,0.111870196,0.1
0.294117647,0.608040201,0.590163934,0.232323232,0.132387707,0.390461997,0.071306576,0.15
0.058823529,0.633165829,0.491803279,0,0,0.448584203,0.115713066,0.433333333
\ No newline at end of file


[3/5] hama git commit: HAMA-961: Remove ann package

Posted by ed...@apache.org.
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/AbstractLayeredNeuralNetwork.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/AbstractLayeredNeuralNetwork.java b/ml/src/main/java/org/apache/hama/ml/ann/AbstractLayeredNeuralNetwork.java
deleted file mode 100644
index eaa1c72..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/AbstractLayeredNeuralNetwork.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hama.commons.math.DoubleDoubleFunction;
-import org.apache.hama.commons.math.DoubleFunction;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
-/**
- * AbstractLayeredNeuralNetwork defines the general operations for derived
- * layered models, including Linear Regression, Logistic Regression, Multilayer
- * Perceptron, Autoencoder, and Restricted Boltzmann Machine.
- * 
- * In general, these models consist of neurons which are aligned in layers.
- * Between layers, for any two adjacent layers, the neurons are connected to
- * form a bipartite weighted graph.
- * 
- */
-abstract class AbstractLayeredNeuralNetwork extends NeuralNetwork {
-
-  private static final double DEFAULT_REGULARIZATION_WEIGHT = 0;
-  private static final double DEFAULT_MOMENTUM_WEIGHT = 0.1;
-
-  double trainingError;
-
-  /* The weight of regularization */
-  protected double regularizationWeight;
-
-  /* The momentumWeight */
-  protected double momentumWeight;
-
-  /* The cost function of the model */
-  protected DoubleDoubleFunction costFunction;
-
-  /* Record the size of each layer */
-  protected List<Integer> layerSizeList;
-
-  protected TrainingMethod trainingMethod;
-  
-  protected LearningStyle learningStyle;
-
-  public static enum TrainingMethod {
-    GRADIENT_DESCENT
-  }
-  
-  public static enum LearningStyle {
-    UNSUPERVISED,
-    SUPERVISED
-  }
-  
-  public AbstractLayeredNeuralNetwork() {
-    this.regularizationWeight = DEFAULT_REGULARIZATION_WEIGHT;
-    this.momentumWeight = DEFAULT_MOMENTUM_WEIGHT;
-    this.trainingMethod = TrainingMethod.GRADIENT_DESCENT;
-    this.learningStyle = LearningStyle.SUPERVISED;
-  }
-
-  public AbstractLayeredNeuralNetwork(String modelPath) {
-    super(modelPath);
-  }
-
-  /**
-   * Set the regularization weight. The recommended range is [0, 0.1); the more
-   * complex the model, the smaller the regularization weight should be.
-   * 
-   * @param regularizationWeight
-   */
-  public void setRegularizationWeight(double regularizationWeight) {
-    Preconditions.checkArgument(regularizationWeight >= 0
-        && regularizationWeight < 1.0,
-        "Regularization weight must be in range [0, 1.0)");
-    this.regularizationWeight = regularizationWeight;
-  }
-
-  public double getRegularizationWeight() {
-    return this.regularizationWeight;
-  }
-
-  /**
-   * Set the momentum weight for the model. The recommended range is [0, 0.5].
-   * 
-   * @param momentumWeight
-   */
-  public void setMomemtumWeight(double momentumWeight) {
-    Preconditions.checkArgument(momentumWeight >= 0 && momentumWeight <= 1.0,
-        "Momentum weight must be in range [0, 1.0]");
-    this.momentumWeight = momentumWeight;
-  }
-
-  public double getMomemtumWeight() {
-    return this.momentumWeight;
-  }
-
-  public void setTrainingMethod(TrainingMethod method) {
-    this.trainingMethod = method;
-  }
-
-  public TrainingMethod getTrainingMethod() {
-    return this.trainingMethod;
-  }
-  
-  public void setLearningStyle(LearningStyle style) {
-    this.learningStyle = style;
-  }
-  
-  public LearningStyle getLearningStyle() {
-    return this.learningStyle;
-  }
-
-  /**
-   * Set the cost function for the model.
-   * 
-   * @param costFunction
-   */
-  public void setCostFunction(DoubleDoubleFunction costFunction) {
-    this.costFunction = costFunction;
-  }
-
-  /**
-   * Add a layer of neurons with the specified size. If the added layer is not
-   * the first layer, its neurons are automatically connected to those of the
-   * previous layer.
-   * 
-   * @param size
-   * @param isFinalLayer If false, add a bias neuron.
-   * @param squashingFunction The squashing function for this layer; the input
-   *          layer uses f(x) = x by default.
-   * @return The layer index, starting from 0.
-   */
-  public abstract int addLayer(int size, boolean isFinalLayer,
-      DoubleFunction squashingFunction);
-
-  /**
-   * Get the size of a particular layer.
-   * 
-   * @param layer
-   * @return The layer size.
-   */
-  public int getLayerSize(int layer) {
-    Preconditions.checkArgument(
-        layer >= 0 && layer < this.layerSizeList.size(),
-        String.format("Input must be in range [0, %d]\n",
-            this.layerSizeList.size() - 1));
-    return this.layerSizeList.get(layer);
-  }
-
-  /**
-   * Get the layer size list.
-   * 
-   * @return The layer size list.
-   */
-  protected List<Integer> getLayerSizeList() {
-    return this.layerSizeList;
-  }
-
-  /**
-   * Get the weights between layer layerIdx and layerIdx + 1
-   * 
-   * @param layerIdx The index of the layer
-   * @return The weights in the form of a {@link DoubleMatrix}
-   */
-  public abstract DoubleMatrix getWeightsByLayer(int layerIdx);
-
-  /**
-   * Get the updated weights using one training instance.
-   * 
-   * @param trainingInstance The trainingInstance is the concatenation of
-   *          feature vector and class label vector.
-   * @return The update of each weight, in the form of a matrix array.
-   */
-  public abstract DoubleMatrix[] trainByInstance(DoubleVector trainingInstance);
-
-  /**
-   * Get the output calculated by the model.
-   * 
-   * @param instance The feature instance.
-   * @return a new vector with the result of the operation.
-   */
-  public abstract DoubleVector getOutput(DoubleVector instance);
-
-  /**
-   * Calculate the training error based on the labels and outputs.
-   * 
-   * @param labels
-   * @param output
-   */
-  protected abstract void calculateTrainingError(DoubleVector labels,
-      DoubleVector output);
-
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    super.readFields(input);
-    // read regularization weight
-    this.regularizationWeight = input.readDouble();
-    // read momentum weight
-    this.momentumWeight = input.readDouble();
-
-    // read cost function
-    this.costFunction = FunctionFactory
-        .createDoubleDoubleFunction(WritableUtils.readString(input));
-
-    // read layer size list
-    int numLayers = input.readInt();
-    this.layerSizeList = Lists.newArrayList();
-    for (int i = 0; i < numLayers; ++i) {
-      this.layerSizeList.add(input.readInt());
-    }
-
-    this.trainingMethod = WritableUtils.readEnum(input, TrainingMethod.class);
-    this.learningStyle = WritableUtils.readEnum(input, LearningStyle.class);
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    super.write(output);
-    // write regularization weight
-    output.writeDouble(this.regularizationWeight);
-    // write momentum weight
-    output.writeDouble(this.momentumWeight);
-
-    // write cost function
-    WritableUtils.writeString(output, costFunction.getFunctionName());
-
-    // write layer size list
-    output.writeInt(this.layerSizeList.size());
-    for (Integer aLayerSizeList : this.layerSizeList) {
-      output.writeInt(aLayerSizeList);
-    }
-
-    WritableUtils.writeEnum(output, this.trainingMethod);
-    WritableUtils.writeEnum(output, this.learningStyle);
-  }
-
-}

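For context, a minimal sketch of how the layered-network API deleted above was wired up through its concrete subclass SmallLayeredNeuralNetwork (removed later in this commit). The layer sizes and hyper-parameter values below are illustrative only, not taken from the Hama codebase:

    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
    // non-final layers get an implicit bias neuron (isFinalLayer == false)
    ann.addLayer(8, false, FunctionFactory.createDoubleFunction("Sigmoid"));
    ann.addLayer(5, false, FunctionFactory.createDoubleFunction("Sigmoid"));
    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
    ann.setCostFunction(FunctionFactory.createDoubleDoubleFunction("SquaredError"));
    ann.setRegularizationWeight(0.01); // must lie in [0, 1.0)
    ann.setMomemtumWeight(0.2);        // spelling matches the removed setter
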
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/AutoEncoder.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/AutoEncoder.java b/ml/src/main/java/org/apache/hama/ml/ann/AutoEncoder.java
deleted file mode 100644
index d591f42..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/AutoEncoder.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hama.ml.ann;
-
-import java.util.Map;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleFunction;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.ann.AbstractLayeredNeuralNetwork.LearningStyle;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-import com.google.common.base.Preconditions;
-
-/**
- * AutoEncoder is a model used for dimensionality reduction and feature
- * learning. It is a special kind of {@link NeuralNetwork} that consists of
- * three layers of neurons, where the first and third layers contain the same
- * number of neurons.
- * 
- */
-public class AutoEncoder {
-
-  private final SmallLayeredNeuralNetwork model;
-
-  /**
-   * Initialize the autoencoder.
-   * 
-   * @param inputDimensions The number of dimensions for the input feature.
-   * @param compressedDimensions The number of dimensions for the compressed
-   *          information.
-   */
-  public AutoEncoder(int inputDimensions, int compressedDimensions) {
-    model = new SmallLayeredNeuralNetwork();
-    model.addLayer(inputDimensions, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    model.addLayer(compressedDimensions, false,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    model.addLayer(inputDimensions, true,
-        FunctionFactory.createDoubleFunction("Sigmoid"));
-    model.setLearningStyle(LearningStyle.UNSUPERVISED);
-    model.setCostFunction(FunctionFactory
-        .createDoubleDoubleFunction("SquaredError"));
-  }
-
-  public AutoEncoder(String modelPath) {
-    model = new SmallLayeredNeuralNetwork(modelPath);
-  }
-
-  public AutoEncoder setLearningRate(double learningRate) {
-    model.setLearningRate(learningRate);
-    return this;
-  }
-
-  public AutoEncoder setMomemtumWeight(double momentumWeight) {
-    model.setMomemtumWeight(momentumWeight);
-    return this;
-  }
-
-  public AutoEncoder setRegularizationWeight(double regularizationWeight) {
-    model.setRegularizationWeight(regularizationWeight);
-    return this;
-  }
-  
-  public AutoEncoder setModelPath(String modelPath) {
-    model.setModelPath(modelPath);
-    return this;
-  }
-
-  /**
-   * Train the autoencoder with the given data. Note that the training data
-   * is expected to be pre-processed into feature vectors.
-   * 
-   * @param dataInputPath
-   * @param trainingParams
-   */
-  public void train(Path dataInputPath, Map<String, String> trainingParams) {
-    model.train(dataInputPath, trainingParams);
-  }
-
-  /**
-   * Train the model with one instance.
-   * 
-   * @param trainingInstance
-   */
-  public void trainOnline(DoubleVector trainingInstance) {
-    model.trainOnline(trainingInstance);
-  }
-
-  /**
-   * Get the matrix M used to encode the input features.
-   * 
-   * @return The matrix that encodes the input features.
-   */
-  public DoubleMatrix getEncodeWeightMatrix() {
-    return model.getWeightsByLayer(0);
-  }
-
-  /**
-   * Get the matrix M used to decode the compressed information.
-   * 
-   * @return The matrix that decodes the compressed information.
-   */
-  public DoubleMatrix getDecodeWeightMatrix() {
-    return model.getWeightsByLayer(1);
-  }
-
-  /**
-   * Transform the input features.
-   * 
-   * @param inputInstance
-   * @return The transformed vector.
-   */
-  private DoubleVector transform(DoubleVector inputInstance, int inputLayer) {
-    DoubleVector internalInstance = new DenseDoubleVector(inputInstance.getDimension() + 1);
-    internalInstance.set(0, 1);
-    for (int i = 0; i < inputInstance.getDimension(); ++i) {
-      internalInstance.set(i + 1, inputInstance.get(i));
-    }
-    DoubleFunction squashingFunction = model
-        .getSquashingFunction(inputLayer);
-    DoubleMatrix weightMatrix = null;
-    if (inputLayer == 0) {
-      weightMatrix = this.getEncodeWeightMatrix();
-    } else {
-      weightMatrix = this.getDecodeWeightMatrix();
-    }
-    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(internalInstance);
-    vec = vec.applyToElements(squashingFunction);
-    return vec;
-  }
-
-  /**
-   * Encode the input instance.
-   * @param inputInstance
-   * @return a new vector with the encoded input instance.
-   */
-  public DoubleVector encode(DoubleVector inputInstance) {
-    Preconditions
-        .checkArgument(
-            inputInstance.getDimension() == model.getLayerSize(0) - 1,
-            String.format("The dimension of input instance is %d, but the model requires dimension %d.",
-                    inputInstance.getDimension(), model.getLayerSize(0) - 1));
-    return this.transform(inputInstance, 0);
-  }
-
-  /**
-   * Decode the input instance.
-   * @param inputInstance
-   * @return a new vector with the decoded input instance.
-   */
-  public DoubleVector decode(DoubleVector inputInstance) {
-    Preconditions
-        .checkArgument(
-            inputInstance.getDimension() == model.getLayerSize(1) - 1,
-            String.format("The dimension of input instance is %d, but the model requires dimension %d.",
-                    inputInstance.getDimension(), model.getLayerSize(1) - 1));
-    return this.transform(inputInstance, 1);
-  }
-  
-  /**
-   * Get the label(s) according to the given features.
-   * @param inputInstance
-   * @return a new vector with the output of the model for the given feature instance.
-   */
-  public DoubleVector getOutput(DoubleVector inputInstance) {
-    return model.getOutput(inputInstance);
-  }
-  
-  /**
-   * Set the feature transformer.
-   * @param featureTransformer
-   */
-  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
-    this.model.setFeatureTransformer(featureTransformer);
-  }
-
-}

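A minimal sketch of the encode/decode round trip offered by the removed AutoEncoder wrapper; the dimensions and values below are illustrative only:

    // compress 8-dimensional features into 3 dimensions
    AutoEncoder encoder = new AutoEncoder(8, 3)
        .setLearningRate(0.1)
        .setMomemtumWeight(0.2);
    DoubleVector feature = new DenseDoubleVector(
        new double[] { 0.1, 0.5, 0.6, 0.2, 0.0, 0.4, 0.1, 0.2 });
    DoubleVector compressed = encoder.encode(feature);        // 3 dimensions
    DoubleVector reconstructed = encoder.decode(compressed);  // 8 dimensions
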
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetwork.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetwork.java b/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetwork.java
deleted file mode 100644
index 64de418..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetwork.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import com.google.common.base.Preconditions;
-import com.google.common.io.Closeables;
-import org.apache.commons.lang.SerializationUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hama.ml.util.DefaultFeatureTransformer;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Map;
-
-/**
- * NeuralNetwork defines the general operations for all the derived models.
- * Typically, all derived models such as Linear Regression, Logistic
- * Regression, and Multilayer Perceptron consist of neurons and the weights
- * between neurons.
- * 
- */
-abstract class NeuralNetwork implements Writable {
-
-  private static final double DEFAULT_LEARNING_RATE = 0.5;
-
-  protected double learningRate;
-  protected boolean learningRateDecay = false;
-
-  // the name of the model
-  protected String modelType;
-  // the path to store the model
-  protected String modelPath;
-
-  protected FeatureTransformer featureTransformer;
-
-  public NeuralNetwork() {
-    this.learningRate = DEFAULT_LEARNING_RATE;
-    this.modelType = this.getClass().getSimpleName();
-    this.featureTransformer = new DefaultFeatureTransformer();
-  }
-
-  public NeuralNetwork(String modelPath) {
-    try {
-      this.modelPath = modelPath;
-      this.readFromModel();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Set the degree of aggressiveness during model training. A large learning
-   * rate can increase the training speed, but it also decreases the chance
-   * that the model converges. The recommended range is (0, 0.3).
-   * 
-   * @param learningRate
-   */
-  public void setLearningRate(double learningRate) {
-    Preconditions.checkArgument(learningRate > 0,
-        "Learning rate must be larger than 0.");
-    this.learningRate = learningRate;
-  }
-
-  public double getLearningRate() {
-    return this.learningRate;
-  }
-
-  public void isLearningRateDecay(boolean decay) {
-    this.learningRateDecay = decay;
-  }
-
-  public String getModelType() {
-    return this.modelType;
-  }
-
-  /**
-   * Train the model with the path of given training data and parameters.
-   * 
-   * @param dataInputPath The path of the training data.
-   * @param trainingParams The parameters for training.
-   * @throws IOException
-   */
-  public void train(Path dataInputPath, Map<String, String> trainingParams) {
-    Preconditions.checkArgument(this.modelPath != null,
-        "Please set the model path before training.");
-    // train with BSP job
-    try {
-      trainInternal(dataInputPath, trainingParams);
-      // reload the trained model from the model path
-      this.readFromModel();
-    } catch (IOException e) {
-      e.printStackTrace();
-    } catch (InterruptedException e) {
-      e.printStackTrace();
-    } catch (ClassNotFoundException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Train the model with the path of given training data and parameters.
-   * 
-   * @param dataInputPath
-   * @param trainingParams
-   */
-  protected abstract void trainInternal(Path dataInputPath,
-      Map<String, String> trainingParams) throws IOException,
-      InterruptedException, ClassNotFoundException;
-
-  /**
-   * Read the model meta-data from the specified location.
-   * 
-   * @throws IOException
-   */
-  protected void readFromModel() throws IOException {
-    Preconditions.checkArgument(this.modelPath != null,
-        "Model path has not been set.");
-    Configuration conf = new Configuration();
-    FSDataInputStream is = null;
-    try {
-      URI uri = new URI(this.modelPath);
-      FileSystem fs = FileSystem.get(uri, conf);
-      is = new FSDataInputStream(fs.open(new Path(modelPath)));
-      this.readFields(is);
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    } finally {
-      Closeables.close(is, false);
-    }
-  }
-
-  /**
-   * Write the model data to the specified location.
-   * 
-   * @throws IOException
-   */
-  public void writeModelToFile() throws IOException {
-    Preconditions.checkArgument(this.modelPath != null,
-        "Model path has not been set.");
-    Configuration conf = new Configuration();
-    FSDataOutputStream is = null;
-    try {
-      URI uri = new URI(this.modelPath);
-      FileSystem fs = FileSystem.get(uri, conf);
-      is = fs.create(new Path(this.modelPath), true);
-      this.write(is);
-    } catch (URISyntaxException e) {
-      e.printStackTrace();
-    }
-
-    Closeables.close(is, false);
-  }
-
-  /**
-   * Set the model path.
-   * 
-   * @param modelPath
-   */
-  public void setModelPath(String modelPath) {
-    this.modelPath = modelPath;
-  }
-
-  /**
-   * Get the model path.
-   * 
-   * @return the path to store the model.
-   */
-  public String getModelPath() {
-    return this.modelPath;
-  }
-
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    // read model type
-    this.modelType = WritableUtils.readString(input);
-    // read learning rate
-    this.learningRate = input.readDouble();
-    // read model path
-    this.modelPath = WritableUtils.readString(input);
-
-    if (this.modelPath.equals("null")) {
-      this.modelPath = null;
-    }
-
-    // read feature transformer
-    int bytesLen = input.readInt();
-    byte[] featureTransformerBytes = new byte[bytesLen];
-    for (int i = 0; i < featureTransformerBytes.length; ++i) {
-      featureTransformerBytes[i] = input.readByte();
-    }
-
-    Class<? extends FeatureTransformer> featureTransformerCls = (Class<? extends FeatureTransformer>) SerializationUtils
-        .deserialize(featureTransformerBytes);
-
-    Constructor[] constructors = featureTransformerCls
-        .getDeclaredConstructors();
-    Constructor constructor = constructors[0];
-
-    try {
-      this.featureTransformer = (FeatureTransformer) constructor
-          .newInstance(new Object[] {});
-    } catch (InstantiationException e) {
-      e.printStackTrace();
-    } catch (IllegalAccessException e) {
-      e.printStackTrace();
-    } catch (IllegalArgumentException e) {
-      e.printStackTrace();
-    } catch (InvocationTargetException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    // write model type
-    WritableUtils.writeString(output, modelType);
-    // write learning rate
-    output.writeDouble(learningRate);
-    // write model path
-    if (this.modelPath != null) {
-      WritableUtils.writeString(output, modelPath);
-    } else {
-      WritableUtils.writeString(output, "null");
-    }
-
-    // serialize the class
-    Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer
-        .getClass();
-    byte[] featureTransformerBytes = SerializationUtils
-        .serialize(featureTransformerCls);
-    output.writeInt(featureTransformerBytes.length);
-    output.write(featureTransformerBytes);
-  }
-
-  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
-    this.featureTransformer = featureTransformer;
-  }
-
-  public FeatureTransformer getFeatureTransformer() {
-    return this.featureTransformer;
-  }
-
-}

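A minimal sketch of the persistence contract defined by the removed NeuralNetwork base class: writeModelToFile() serializes the model to modelPath, and the String constructor of a concrete subclass reads it back via readFromModel(). The path below is illustrative:

    SmallLayeredNeuralNetwork model = new SmallLayeredNeuralNetwork();
    // ... add layers and set a cost function before persisting ...
    model.setModelPath("/tmp/ann.model");
    model.writeModelToFile();

    // the String constructor calls readFromModel() internally
    SmallLayeredNeuralNetwork reloaded =
        new SmallLayeredNeuralNetwork("/tmp/ann.model");
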
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetworkTrainer.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetworkTrainer.java b/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetworkTrainer.java
deleted file mode 100644
index d1e43b9..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/NeuralNetworkTrainer.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hama.bsp.BSP;
-import org.apache.hama.bsp.BSPPeer;
-import org.apache.hama.bsp.sync.SyncException;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.ml.perception.MLPMessage;
-import org.apache.hama.ml.util.DefaultFeatureTransformer;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-/**
- * The trainer that is used to train the {@link SmallLayeredNeuralNetwork} with
- * BSP. The trainer reads the training data and obtains the trained
- * parameters of the model.
- * 
- */
-public abstract class NeuralNetworkTrainer extends
-    BSP<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> {
-
-  protected static final Log LOG = LogFactory
-      .getLog(NeuralNetworkTrainer.class);
-
-  protected Configuration conf;
-  protected int maxIteration;
-  protected int batchSize;
-  protected String trainingMode;
-  
-  protected FeatureTransformer featureTransformer;
-  
-  @Override
-  final public void setup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-    conf = peer.getConfiguration();
-    featureTransformer = new DefaultFeatureTransformer();
-    this.extraSetup(peer);
-  }
-
-  /**
-   * Handle extra setup for sub-classes.
-   * 
-   * @param peer
-   * @throws IOException
-   * @throws SyncException
-   * @throws InterruptedException
-   */
-  protected void extraSetup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public abstract void bsp(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException;
-
-  @Override
-  public void cleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-    this.extraCleanup(peer);
-    // write model to modelPath
-  }
-
-  /**
-   * Handle cleanup for sub-classes. Write the trained model back.
-   * 
-   * @param peer
-   * @throws IOException
-   * @throws SyncException
-   * @throws InterruptedException
-   */
-  protected void extraCleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-
-  }
-
-}

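A minimal skeleton of the subclassing contract of the removed trainer: only bsp() is abstract, while extraSetup() and extraCleanup() are optional hooks. The class below is hypothetical and assumes the same imports as the deleted file:

    public class MyTrainer extends NeuralNetworkTrainer {
      @Override
      public void bsp(
          BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
          throws IOException, SyncException, InterruptedException {
        // read the local partition, compute weight updates,
        // peer.sync(), and let the master merge the results
      }
    }
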
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetwork.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetwork.java b/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetwork.java
deleted file mode 100644
index fdda61f..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetwork.java
+++ /dev/null
@@ -1,567 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.math.RandomUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.bsp.BSPJob;
-import org.apache.hama.commons.io.MatrixWritable;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DenseDoubleVector;
-import org.apache.hama.commons.math.DoubleFunction;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.mortbay.log.Log;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
-/**
- * SmallLayeredNeuralNetwork defines the general operations for derived
- * layered models, including Linear Regression, Logistic Regression, Multilayer
- * Perceptron, Autoencoder, and Restricted Boltzmann Machine. For
- * SmallLayeredNeuralNetwork, the training can be conducted in parallel, but the
- * parameters of the model are assumed to be stored on a single machine.
- * 
- * In general, these models consist of neurons which are aligned in layers.
- * Between layers, for any two adjacent layers, the neurons are connected to
- * form a bipartite weighted graph.
- * 
- */
-public class SmallLayeredNeuralNetwork extends AbstractLayeredNeuralNetwork {
-
-  /* Weights between neurons at adjacent layers */
-  protected List<DoubleMatrix> weightMatrixList;
-
-  /* Previous weight updates between neurons at adjacent layers */
-  protected List<DoubleMatrix> prevWeightUpdatesList;
-
-  /* Different layers can have different squashing function */
-  protected List<DoubleFunction> squashingFunctionList;
-
-  protected int finalLayerIdx;
-
-  public SmallLayeredNeuralNetwork() {
-    this.layerSizeList = Lists.newArrayList();
-    this.weightMatrixList = Lists.newArrayList();
-    this.prevWeightUpdatesList = Lists.newArrayList();
-    this.squashingFunctionList = Lists.newArrayList();
-  }
-
-  public SmallLayeredNeuralNetwork(String modelPath) {
-    super(modelPath);
-  }
-
-  @Override
-  /**
-   * {@inheritDoc}
-   */
-  public int addLayer(int size, boolean isFinalLayer,
-      DoubleFunction squashingFunction) {
-    Preconditions.checkArgument(size > 0,
-        "Size of layer must be larger than 0.");
-    if (!isFinalLayer) {
-      size += 1;
-    }
-
-    this.layerSizeList.add(size);
-    int layerIdx = this.layerSizeList.size() - 1;
-    if (isFinalLayer) {
-      this.finalLayerIdx = layerIdx;
-    }
-
-    // add weights between current layer and previous layer, and input layer has
-    // no squashing function
-    if (layerIdx > 0) {
-      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
-      // row count equals the size of the current layer and column count
-      // equals the size of the previous layer
-      int row = isFinalLayer ? size : size - 1;
-      int col = sizePrevLayer;
-      DoubleMatrix weightMatrix = new DenseDoubleMatrix(row, col);
-      // initialize weights
-      weightMatrix.applyToElements(new DoubleFunction() {
-        @Override
-        public double apply(double value) {
-          return RandomUtils.nextDouble() - 0.5;
-        }
-
-        @Override
-        public double applyDerivative(double value) {
-          throw new UnsupportedOperationException("");
-        }
-      });
-      this.weightMatrixList.add(weightMatrix);
-      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(row, col));
-      this.squashingFunctionList.add(squashingFunction);
-    }
-    return layerIdx;
-  }
-
-  /**
-   * Update the weight matrices with given matrices.
-   * 
-   * @param matrices
-   */
-  public void updateWeightMatrices(DoubleMatrix[] matrices) {
-    for (int i = 0; i < matrices.length; ++i) {
-      DoubleMatrix matrix = this.weightMatrixList.get(i);
-      this.weightMatrixList.set(i, matrix.add(matrices[i]));
-    }
-  }
-
-  /**
-   * Set the previous weight update matrices.
-   * @param prevUpdates
-   */
-  void setPrevWeightMatrices(DoubleMatrix[] prevUpdates) {
-    this.prevWeightUpdatesList.clear();
-    Collections.addAll(this.prevWeightUpdatesList, prevUpdates);
-  }
-
-  /**
-   * Add a batch of matrices onto the given destination matrices.
-   * 
-   * @param destMatrices
-   * @param sourceMatrices
-   */
-  static void matricesAdd(DoubleMatrix[] destMatrices,
-      DoubleMatrix[] sourceMatrices) {
-    for (int i = 0; i < destMatrices.length; ++i) {
-      destMatrices[i] = destMatrices[i].add(sourceMatrices[i]);
-    }
-  }
-
-  /**
-   * Get all the weight matrices.
-   * 
-   * @return The matrices in the form of a matrix array.
-   */
-  DoubleMatrix[] getWeightMatrices() {
-    DoubleMatrix[] matrices = new DoubleMatrix[this.weightMatrixList.size()];
-    this.weightMatrixList.toArray(matrices);
-    return matrices;
-  }
-
-  /**
-   * Set the weight matrices.
-   * 
-   * @param matrices
-   */
-  public void setWeightMatrices(DoubleMatrix[] matrices) {
-    this.weightMatrixList = new ArrayList<DoubleMatrix>();
-    Collections.addAll(this.weightMatrixList, matrices);
-  }
-
-  /**
-   * Get the previous weight update matrices in the form of an array.
-   * 
-   * @return The matrices in the form of a matrix array.
-   */
-  public DoubleMatrix[] getPrevMatricesUpdates() {
-    DoubleMatrix[] prevMatricesUpdates = new DoubleMatrix[this.prevWeightUpdatesList
-        .size()];
-    for (int i = 0; i < this.prevWeightUpdatesList.size(); ++i) {
-      prevMatricesUpdates[i] = this.prevWeightUpdatesList.get(i);
-    }
-    return prevMatricesUpdates;
-  }
-
-  public void setWeightMatrix(int index, DoubleMatrix matrix) {
-    Preconditions.checkArgument(
-        0 <= index && index < this.weightMatrixList.size(), String.format(
-            "index [%d] should be in range [%d, %d].", index, 0,
-            this.weightMatrixList.size() - 1));
-    this.weightMatrixList.set(index, matrix);
-  }
-
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    super.readFields(input);
-
-    // read squash functions
-    int squashingFunctionSize = input.readInt();
-    this.squashingFunctionList = Lists.newArrayList();
-    for (int i = 0; i < squashingFunctionSize; ++i) {
-      this.squashingFunctionList.add(FunctionFactory
-          .createDoubleFunction(WritableUtils.readString(input)));
-    }
-
-    // read weights and construct matrices of previous updates
-    int numOfMatrices = input.readInt();
-    this.weightMatrixList = Lists.newArrayList();
-    this.prevWeightUpdatesList = Lists.newArrayList();
-    for (int i = 0; i < numOfMatrices; ++i) {
-      DoubleMatrix matrix = MatrixWritable.read(input);
-      this.weightMatrixList.add(matrix);
-      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(
-          matrix.getRowCount(), matrix.getColumnCount()));
-    }
-
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    super.write(output);
-
-    // write squashing functions
-    output.writeInt(this.squashingFunctionList.size());
-    for (DoubleFunction aSquashingFunctionList : this.squashingFunctionList) {
-      WritableUtils.writeString(output, aSquashingFunctionList
-              .getFunctionName());
-    }
-
-    // write weight matrices
-    output.writeInt(this.weightMatrixList.size());
-    for (DoubleMatrix aWeightMatrixList : this.weightMatrixList) {
-      MatrixWritable.write(aWeightMatrixList, output);
-    }
-
-    // DO NOT WRITE WEIGHT UPDATE
-  }
-
-  @Override
-  public DoubleMatrix getWeightsByLayer(int layerIdx) {
-    return this.weightMatrixList.get(layerIdx);
-  }
-
-  /**
-   * Get the output of the model according to given feature instance.
-   */
-  @Override
-  public DoubleVector getOutput(DoubleVector instance) {
-    Preconditions.checkArgument(this.layerSizeList.get(0) - 1 == instance
-        .getDimension(), String.format(
-        "The dimension of input instance should be %d.",
-        this.layerSizeList.get(0) - 1));
-    // transform the features to another space
-    DoubleVector transformedInstance = this.featureTransformer
-        .transform(instance);
-    // add bias feature
-    DoubleVector instanceWithBias = new DenseDoubleVector(
-        transformedInstance.getDimension() + 1);
-    instanceWithBias.set(0, 0.99999); // set bias to be a little bit less than
-                                      // 1.0
-    for (int i = 1; i < instanceWithBias.getDimension(); ++i) {
-      instanceWithBias.set(i, transformedInstance.get(i - 1));
-    }
-
-    List<DoubleVector> outputCache = getOutputInternal(instanceWithBias);
-    // return the output of the last layer
-    DoubleVector result = outputCache.get(outputCache.size() - 1);
-    // remove bias
-    return result.sliceUnsafe(1, result.getDimension() - 1);
-  }
-
-  /**
-   * Calculate the output internally; the intermediate output of each layer
-   * will be stored.
-   * 
-   * @param instanceWithBias The instance contains the features.
-   * @return Cached output of each layer.
-   */
-  public List<DoubleVector> getOutputInternal(DoubleVector instanceWithBias) {
-    List<DoubleVector> outputCache = new ArrayList<DoubleVector>();
-    // fill with instance
-    DoubleVector intermediateOutput = instanceWithBias;
-    outputCache.add(intermediateOutput);
-
-    for (int i = 0; i < this.layerSizeList.size() - 1; ++i) {
-      intermediateOutput = forward(i, intermediateOutput);
-      outputCache.add(intermediateOutput);
-    }
-    return outputCache;
-  }
-
-  /**
-   * Forward the calculation for one layer.
-   * 
-   * @param fromLayer The index of the previous layer.
-   * @param intermediateOutput The intermediateOutput of previous layer.
-   * @return a new vector with the result of the operation.
-   */
-  protected DoubleVector forward(int fromLayer, DoubleVector intermediateOutput) {
-    DoubleMatrix weightMatrix = this.weightMatrixList.get(fromLayer);
-
-    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(intermediateOutput);
-    vec = vec.applyToElements(this.squashingFunctionList.get(fromLayer));
-
-    // add bias
-    DoubleVector vecWithBias = new DenseDoubleVector(vec.getDimension() + 1);
-    vecWithBias.set(0, 1);
-    for (int i = 0; i < vec.getDimension(); ++i) {
-      vecWithBias.set(i + 1, vec.get(i));
-    }
-    return vecWithBias;
-  }
-
-  /**
-   * Train the model online.
-   * 
-   * @param trainingInstance
-   */
-  public void trainOnline(DoubleVector trainingInstance) {
-    DoubleMatrix[] updateMatrices = this.trainByInstance(trainingInstance);
-    this.updateWeightMatrices(updateMatrices);
-  }
-
-  @Override
-  public DoubleMatrix[] trainByInstance(DoubleVector trainingInstance) {
-    DoubleVector transformedVector = this.featureTransformer
-        .transform(trainingInstance.sliceUnsafe(this.layerSizeList.get(0) - 1));
-
-    int inputDimension = this.layerSizeList.get(0) - 1;
-    int outputDimension;
-    DoubleVector inputInstance = null;
-    DoubleVector labels = null;
-    if (this.learningStyle == LearningStyle.SUPERVISED) {
-      outputDimension = this.layerSizeList.get(this.layerSizeList.size() - 1);
-      // validate training instance
-      Preconditions.checkArgument(
-          inputDimension + outputDimension == trainingInstance.getDimension(),
-          String
-              .format(
-                  "The dimension of training instance is %d, but requires %d.",
-                  trainingInstance.getDimension(), inputDimension
-                      + outputDimension));
-
-      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
-      inputInstance.set(0, 1); // add bias
-      // get the features from the transformed vector
-      for (int i = 0; i < inputDimension; ++i) {
-        inputInstance.set(i + 1, transformedVector.get(i));
-      }
-      // get the labels from the original training instance
-      labels = trainingInstance.sliceUnsafe(inputInstance.getDimension() - 1,
-          trainingInstance.getDimension() - 1);
-    } else if (this.learningStyle == LearningStyle.UNSUPERVISED) {
-      // labels are identical to input features
-      outputDimension = inputDimension;
-      // validate training instance
-      Preconditions.checkArgument(inputDimension == trainingInstance
-          .getDimension(), String.format(
-          "The dimension of training instance is %d, but requires %d.",
-          trainingInstance.getDimension(), inputDimension));
-
-      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
-      inputInstance.set(0, 1); // add bias
-      // get the features from the transformed vector
-      for (int i = 0; i < inputDimension; ++i) {
-        inputInstance.set(i + 1, transformedVector.get(i));
-      }
-      // get the labels by copying the transformed vector
-      labels = transformedVector.deepCopy();
-    }
-
-    List<DoubleVector> internalResults = this.getOutputInternal(inputInstance);
-    DoubleVector output = internalResults.get(internalResults.size() - 1);
-
-    // get the training error
-    calculateTrainingError(labels,
-        output.deepCopy().sliceUnsafe(1, output.getDimension() - 1));
-
-    if (this.trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
-      return this.trainByInstanceGradientDescent(labels, internalResults);
-    } else {
-      throw new IllegalArgumentException("Training method is not supported.");
-    }
-  }
-
-  /**
-   * Train by gradient descent. Get the updated weights using one training
-   * instance.
-   * 
-   * @param labels The labels of the training instance.
-   * @param internalResults The cached output of each layer.
-   * @return The weight update matrices.
-   */
-  private DoubleMatrix[] trainByInstanceGradientDescent(DoubleVector labels,
-      List<DoubleVector> internalResults) {
-
-    DoubleVector output = internalResults.get(internalResults.size() - 1);
-    // initialize weight update matrices
-    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.weightMatrixList
-        .size()];
-    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
-      weightUpdateMatrices[m] = new DenseDoubleMatrix(this.weightMatrixList
-          .get(m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
-    }
-    DoubleVector deltaVec = new DenseDoubleVector(
-        this.layerSizeList.get(this.layerSizeList.size() - 1));
-
-    DoubleFunction squashingFunction = this.squashingFunctionList
-        .get(this.squashingFunctionList.size() - 1);
-
-    DoubleMatrix lastWeightMatrix = this.weightMatrixList
-        .get(this.weightMatrixList.size() - 1);
-    for (int i = 0; i < deltaVec.getDimension(); ++i) {
-      double costFuncDerivative = this.costFunction.applyDerivative(
-          labels.get(i), output.get(i + 1));
-      // add regularization
-      costFuncDerivative += this.regularizationWeight
-          * lastWeightMatrix.getRowVector(i).sum();
-      deltaVec.set(
-          i,
-          costFuncDerivative
-              * squashingFunction.applyDerivative(output.get(i + 1)));
-    }
-
-    // start from previous layer of output layer
-    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
-      output = internalResults.get(layer);
-      deltaVec = backpropagate(layer, deltaVec, internalResults,
-          weightUpdateMatrices[layer]);
-    }
-
-    this.setPrevWeightMatrices(weightUpdateMatrices);
-
-    return weightUpdateMatrices;
-  }
-
-  /**
-   * Back-propagate the errors from the next layer to the current layer. The
-   * weight update information is stored in weightUpdateMatrix, and the delta
-   * of the current layer is returned.
-   * 
-   * @param curLayerIdx Index of the current layer.
-   * @param nextLayerDelta Delta of the next layer.
-   * @param outputCache Cached output of each layer.
-   * @param weightUpdateMatrix The matrix to store the weight updates.
-   * @return The delta of the current layer.
-   */
-  private DoubleVector backpropagate(int curLayerIdx,
-      DoubleVector nextLayerDelta, List<DoubleVector> outputCache,
-      DenseDoubleMatrix weightUpdateMatrix) {
-
-    // get layer related information
-    DoubleFunction squashingFunction = this.squashingFunctionList
-        .get(curLayerIdx);
-    DoubleVector curLayerOutput = outputCache.get(curLayerIdx);
-    DoubleMatrix weightMatrix = this.weightMatrixList.get(curLayerIdx);
-    DoubleMatrix prevWeightMatrix = this.prevWeightUpdatesList.get(curLayerIdx);
-
-    // next layer is not output layer, remove the delta of bias neuron
-    if (curLayerIdx != this.layerSizeList.size() - 2) {
-      nextLayerDelta = nextLayerDelta.slice(1,
-          nextLayerDelta.getDimension() - 1);
-    }
-
-    DoubleVector delta = weightMatrix.transpose()
-        .multiplyVector(nextLayerDelta);
-    for (int i = 0; i < delta.getDimension(); ++i) {
-      delta.set(
-          i,
-          delta.get(i)
-              * squashingFunction.applyDerivative(curLayerOutput.get(i)));
-    }
-
-    // update weights
-    for (int i = 0; i < weightUpdateMatrix.getRowCount(); ++i) {
-      for (int j = 0; j < weightUpdateMatrix.getColumnCount(); ++j) {
-        weightUpdateMatrix.set(i, j,
-            -learningRate * nextLayerDelta.get(i) * curLayerOutput.get(j)
-                + this.momentumWeight * prevWeightMatrix.get(i, j));
-      }
-    }
-
-    return delta;
-  }
-
-  @Override
-  protected void trainInternal(Path dataInputPath,
-      Map<String, String> trainingParams) throws IOException,
-      InterruptedException, ClassNotFoundException {
-    // add all training parameters to configuration
-    Configuration conf = new Configuration();
-    for (Map.Entry<String, String> entry : trainingParams.entrySet()) {
-      conf.set(entry.getKey(), entry.getValue());
-    }
-
-    // if the training parameters contain the model path, update the model path
-    String modelPath = trainingParams.get("modelPath");
-    if (modelPath != null) {
-      this.modelPath = modelPath;
-    }
-    // modelPath must be set before training
-    if (this.modelPath == null) {
-      throw new IllegalArgumentException(
-          "Please specify the modelPath for model, "
-              + "either through setModelPath() or add 'modelPath' to the training parameters.");
-    }
-
-    conf.set("modelPath", this.modelPath);
-    this.writeModelToFile();
-
-    HamaConfiguration hamaConf = new HamaConfiguration(conf);
-
-    // create job
-    BSPJob job = new BSPJob(hamaConf, SmallLayeredNeuralNetworkTrainer.class);
-    job.setJobName("Small scale Neural Network training");
-    job.setJarByClass(SmallLayeredNeuralNetworkTrainer.class);
-    job.setBspClass(SmallLayeredNeuralNetworkTrainer.class);
-    job.setInputPath(dataInputPath);
-    job.setInputFormat(org.apache.hama.bsp.SequenceFileInputFormat.class);
-    job.setInputKeyClass(LongWritable.class);
-    job.setInputValueClass(VectorWritable.class);
-    job.setOutputKeyClass(NullWritable.class);
-    job.setOutputValueClass(NullWritable.class);
-    job.setOutputFormat(org.apache.hama.bsp.NullOutputFormat.class);
-
-    int numTasks = conf.getInt("tasks", 1);
-    Log.info(String.format("Number of tasks: %d\n", numTasks));
-    job.setNumBspTask(numTasks);
-    job.waitForCompletion(true);
-
-    // reload learned model
-    Log.info(String.format("Reload model from %s.", this.modelPath));
-    this.readFromModel();
-
-  }
-
-  @Override
-  protected void calculateTrainingError(DoubleVector labels, DoubleVector output) {
-    DoubleVector errors = labels.deepCopy().applyToElements(output,
-        this.costFunction);
-    this.trainingError = errors.sum();
-  }
-
-  /**
-   * Get the squashing function of a specified layer.
-   * 
-   * @param idx
-   * @return The squashing function of the specified layer.
-   */
-  public DoubleFunction getSquashingFunction(int idx) {
-    return this.squashingFunctionList.get(idx);
-  }
-
-}

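A minimal sketch of launching the BSP training implemented by trainInternal() above. The parameter keys are the ones read back by SmallLayeredNeuralNetworkTrainer (removed below); the paths and values are illustrative only:

    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
    // ... add layers and set a cost function as in the earlier sketch ...
    ann.setModelPath("/tmp/ann.model"); // required before train()
    Map<String, String> params = new HashMap<String, String>();
    params.put("training.max.iterations", "10000");
    params.put("training.batch.size", "50");
    params.put("convergence.check.interval", "2000");
    params.put("tasks", "3"); // number of BSP tasks
    ann.train(new Path("/data/train.seq"), params);
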
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkMessage.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkMessage.java b/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkMessage.java
deleted file mode 100644
index f941614..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkMessage.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.Writable;
-import org.apache.hama.commons.io.MatrixWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DoubleMatrix;
-
-/**
- * SmallLayeredNeuralNetworkMessage transmits the messages between peers during the training
- * of neural networks.
- * 
- */
-public class SmallLayeredNeuralNetworkMessage implements Writable {
-
-  protected double trainingError;
-  protected DoubleMatrix[] curMatrices;
-  protected DoubleMatrix[] prevMatrices;
-  protected boolean converge;
-
-  public SmallLayeredNeuralNetworkMessage() {
-  }
-  
-  public SmallLayeredNeuralNetworkMessage(double trainingError,
-      boolean converge, DoubleMatrix[] weightMatrices,
-      DoubleMatrix[] prevMatrices) {
-    this.trainingError = trainingError;
-    this.converge = converge;
-    this.curMatrices = weightMatrices;
-    this.prevMatrices = prevMatrices;
-  }
-
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    trainingError = input.readDouble();
-    converge = input.readBoolean();
-    int numMatrices = input.readInt();
-    boolean hasPrevMatrices = input.readBoolean();
-    curMatrices = new DenseDoubleMatrix[numMatrices];
-    // read current matrix updates
-    for (int i = 0; i < curMatrices.length; ++i) {
-      curMatrices[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
-    }
-
-    if (hasPrevMatrices) {
-      prevMatrices = new DenseDoubleMatrix[numMatrices];
-      // read previous matrices updates
-      for (int i = 0; i < prevMatrices.length; ++i) {
-        prevMatrices[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
-      }
-    }
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    output.writeDouble(trainingError);
-    output.writeBoolean(converge);
-    output.writeInt(curMatrices.length);
-    if (prevMatrices == null) {
-      output.writeBoolean(false);
-    } else {
-      output.writeBoolean(true);
-    }
-    for (DoubleMatrix matrix : curMatrices) {
-      MatrixWritable.write(matrix, output);
-    }
-    if (prevMatrices != null) {
-      for (DoubleMatrix matrix : prevMatrices) {
-        MatrixWritable.write(matrix, output);
-      }
-    }
-  }
-
-  public double getTrainingError() {
-    return trainingError;
-  }
-
-  public void setTrainingError(double trainingError) {
-    this.trainingError = trainingError;
-  }
-
-  public boolean isConverge() {
-    return converge;
-  }
-
-  public void setConverge(boolean converge) {
-    this.converge = converge;
-  }
-
-  public DoubleMatrix[] getCurMatrices() {
-    return curMatrices;
-  }
-
-  public void setMatrices(DoubleMatrix[] curMatrices) {
-    this.curMatrices = curMatrices;
-  }
-
-  public DoubleMatrix[] getPrevMatrices() {
-    return prevMatrices;
-  }
-
-  public void setPrevMatrices(DoubleMatrix[] prevMatrices) {
-    this.prevMatrices = prevMatrices;
-  }
-
-}

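A minimal sketch of the wire format implemented above: prevMatrices may be null, the writer records a boolean flag, and the reader reconstructs the previous matrices only when that flag is set. The DataOutput/DataInput streams (out, in) are assumed to come from the surrounding BSP messaging code:

    DoubleMatrix[] weights = new DoubleMatrix[] {
        new DenseDoubleMatrix(new double[][] { { 0.1, 0.2 }, { 0.3, 0.4 } }) };
    SmallLayeredNeuralNetworkMessage msg =
        new SmallLayeredNeuralNetworkMessage(0.05, false, weights, null);
    msg.write(out); // writes hasPrevMatrices == false

    SmallLayeredNeuralNetworkMessage received =
        new SmallLayeredNeuralNetworkMessage();
    received.readFields(in);
    // received.getPrevMatrices() stays null because none were sent
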
http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkTrainer.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkTrainer.java b/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkTrainer.java
deleted file mode 100644
index 326b7a1..0000000
--- a/ml/src/main/java/org/apache/hama/ml/ann/SmallLayeredNeuralNetworkTrainer.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.ann;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hama.bsp.BSP;
-import org.apache.hama.bsp.BSPPeer;
-import org.apache.hama.bsp.sync.SyncException;
-import org.apache.hama.commons.io.VectorWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-import org.apache.hama.commons.math.DoubleMatrix;
-import org.apache.hama.commons.math.DoubleVector;
-import org.mortbay.log.Log;
-
-/**
- * The trainer that trains the {@link SmallLayeredNeuralNetwork} based on BSP
- * framework.
- * 
- */
-public final class SmallLayeredNeuralNetworkTrainer
-    extends
-    BSP<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> {
-
-  private SmallLayeredNeuralNetwork inMemoryModel;
-  private Configuration conf;
-  /* Default batch size */
-  private int batchSize;
-
-  /* fields used to check convergence between intervals */
-  private double prevAvgTrainingError;
-  private double curAvgTrainingError;
-  private long convergenceCheckInterval;
-  private long iterations;
-  private long maxIterations;
-  private boolean isConverge;
-
-  private String modelPath;
-
-  /**
-   * If the model path is specified, load the existing model from the storage
-   * location.
-   */
-  @Override
-  public void setup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer) {
-    if (peer.getPeerIndex() == 0) {
-      Log.info("Begin to train");
-    }
-    this.isConverge = false;
-    this.conf = peer.getConfiguration();
-    this.iterations = 0;
-    this.modelPath = conf.get("modelPath");
-    this.maxIterations = conf.getLong("training.max.iterations", 100000);
-    this.convergenceCheckInterval = conf.getLong("convergence.check.interval",
-        2000);
-    this.inMemoryModel = new SmallLayeredNeuralNetwork(modelPath);
-    this.prevAvgTrainingError = Integer.MAX_VALUE;
-    this.batchSize = conf.getInt("training.batch.size", 50);
-  }
-
-  /**
-   * Write the trained model back to the storage location.
-   */
-  @Override
-  public void cleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer) {
-    // write model to modelPath
-    if (peer.getPeerIndex() == 0) {
-      try {
-        Log.info(String.format("End of training, number of iterations: %d.\n",
-            this.iterations));
-        Log.info(String.format("Write model back to %s\n",
-            inMemoryModel.getModelPath()));
-        this.inMemoryModel.writeModelToFile();
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
-  }
-
-  @Override
-  public void bsp(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-    while (this.iterations++ < maxIterations) {
-      // each groom calculates the matrix updates according to its local data
-      calculateUpdates(peer);
-      peer.sync();
-
-      // the master merges the model updates
-      if (peer.getPeerIndex() == 0) {
-        mergeUpdates(peer);
-      }
-      peer.sync();
-      if (this.isConverge) {
-        break;
-      }
-    }
-  }
-
-  /**
-   * Calculate the matrix updates according to the local partition of the data.
-   * 
-   * @param peer
-   * @throws IOException
-   */
-  private void calculateUpdates(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
-      throws IOException {
-    // receive update information from master
-    if (peer.getNumCurrentMessages() != 0) {
-      SmallLayeredNeuralNetworkMessage inMessage = peer.getCurrentMessage();
-      DoubleMatrix[] newWeights = inMessage.getCurMatrices();
-      DoubleMatrix[] preWeightUpdates = inMessage.getPrevMatrices();
-      this.inMemoryModel.setWeightMatrices(newWeights);
-      this.inMemoryModel.setPrevWeightMatrices(preWeightUpdates);
-      this.isConverge = inMessage.isConverge();
-      // check converge
-      if (isConverge) {
-        return;
-      }
-    }
-
-    DoubleMatrix[] weightUpdates = new DoubleMatrix[this.inMemoryModel.weightMatrixList
-        .size()];
-    for (int i = 0; i < weightUpdates.length; ++i) {
-      int row = this.inMemoryModel.weightMatrixList.get(i).getRowCount();
-      int col = this.inMemoryModel.weightMatrixList.get(i).getColumnCount();
-      weightUpdates[i] = new DenseDoubleMatrix(row, col);
-    }
-
-    // continue to train
-    double avgTrainingError = 0.0;
-    LongWritable key = new LongWritable();
-    VectorWritable value = new VectorWritable();
-    for (int recordsRead = 0; recordsRead < batchSize; ++recordsRead) {
-      if (!peer.readNext(key, value)) {
-        peer.reopenInput();
-        peer.readNext(key, value);
-      }
-      DoubleVector trainingInstance = value.getVector();
-      SmallLayeredNeuralNetwork.matricesAdd(weightUpdates,
-          this.inMemoryModel.trainByInstance(trainingInstance));
-      avgTrainingError += this.inMemoryModel.trainingError;
-    }
-    avgTrainingError /= batchSize;
-
-    // calculate the average of updates
-    for (int i = 0; i < weightUpdates.length; ++i) {
-      weightUpdates[i] = weightUpdates[i].divide(batchSize);
-    }
-
-    DoubleMatrix[] prevWeightUpdates = this.inMemoryModel
-        .getPrevMatricesUpdates();
-    SmallLayeredNeuralNetworkMessage outMessage = new SmallLayeredNeuralNetworkMessage(
-        avgTrainingError, false, weightUpdates, prevWeightUpdates);
-    peer.send(peer.getPeerName(0), outMessage);
-  }
-
-  /**
-   * Merge the weight updates received from the grooms.
-   * 
-   * @param peer
-   * @throws IOException
-   */
-  private void mergeUpdates(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
-      throws IOException {
-    int numMessages = peer.getNumCurrentMessages();
-    boolean isConverge = false;
-    if (numMessages == 0) { // no groom sent an update, so training has converged
-      isConverge = true;
-      return;
-    }
-
-    double avgTrainingError = 0;
-    DoubleMatrix[] matricesUpdates = null;
-    DoubleMatrix[] prevMatricesUpdates = null;
-
-    while (peer.getNumCurrentMessages() > 0) {
-      SmallLayeredNeuralNetworkMessage message = peer.getCurrentMessage();
-      if (matricesUpdates == null) {
-        matricesUpdates = message.getCurMatrices();
-        prevMatricesUpdates = message.getPrevMatrices();
-      } else {
-        SmallLayeredNeuralNetwork.matricesAdd(matricesUpdates,
-            message.getCurMatrices());
-        SmallLayeredNeuralNetwork.matricesAdd(prevMatricesUpdates,
-            message.getPrevMatrices());
-      }
-      avgTrainingError += message.getTrainingError();
-    }
-
-    if (numMessages != 1) {
-      avgTrainingError /= numMessages;
-      for (int i = 0; i < matricesUpdates.length; ++i) {
-        matricesUpdates[i] = matricesUpdates[i].divide(numMessages);
-        prevMatricesUpdates[i] = prevMatricesUpdates[i].divide(numMessages);
-      }
-    }
-    this.inMemoryModel.updateWeightMatrices(matricesUpdates);
-    this.inMemoryModel.setPrevWeightMatrices(prevMatricesUpdates);
-
-    // check convergence
-    if (iterations % convergenceCheckInterval == 0) {
-      if (prevAvgTrainingError < curAvgTrainingError) {
-        // error cannot decrease any more
-        isConverge = true;
-      }
-      // update
-      prevAvgTrainingError = curAvgTrainingError;
-      curAvgTrainingError = 0;
-    }
-    curAvgTrainingError += avgTrainingError / convergenceCheckInterval;
-
-    // broadcast updated weight matrices
-    for (String peerName : peer.getAllPeerNames()) {
-      SmallLayeredNeuralNetworkMessage msg = new SmallLayeredNeuralNetworkMessage(
-          0, isConverge, this.inMemoryModel.getWeightMatrices(),
-          this.inMemoryModel.getPrevMatricesUpdates());
-      peer.send(peerName, msg);
-    }
-  }
-
-}
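
setup() above pulls everything it needs from the job configuration, so a driver only has to populate four keys before submitting the BSP job. A sketch with illustrative values (the job-submission boilerplate itself is omitted):

  Configuration conf = new Configuration();
  conf.set("modelPath", "/tmp/ann.model");           // loaded in setup(), written back in cleanup()
  conf.setLong("training.max.iterations", 100000L);  // upper bound on supersteps
  conf.setLong("convergence.check.interval", 2000L); // how often the master compares error averages
  conf.setInt("training.batch.size", 50);            // records each groom reads per superstep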

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/MLPMessage.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/MLPMessage.java b/ml/src/main/java/org/apache/hama/ml/perception/MLPMessage.java
deleted file mode 100644
index a4a1a99..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/MLPMessage.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import org.apache.hadoop.io.Writable;
-
-/**
- * MLPMessage is used to hold the parameters that need to be sent between the
- * tasks.
- */
-public abstract class MLPMessage implements Writable {
-  protected boolean terminated;
-
-  public MLPMessage() {
-  }
-  
-  public MLPMessage(boolean terminated) {
-    setTerminated(terminated);
-  }
-
-  public void setTerminated(boolean terminated) {
-    this.terminated = terminated;
-  }
-
-  public boolean isTerminated() {
-    return terminated;
-  }
-
-}
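
A concrete message only has to serialize the terminated flag plus its own payload. A minimal, hypothetical subclass (the class name is illustrative, not from the removed code):

  public class EmptyMLPMessage extends MLPMessage {
    public EmptyMLPMessage() {
      super();
    }

    public EmptyMLPMessage(boolean terminated) {
      super(terminated);
    }

    @Override
    public void write(java.io.DataOutput out) throws java.io.IOException {
      out.writeBoolean(terminated); // the only state the base class defines
    }

    @Override
    public void readFields(java.io.DataInput in) throws java.io.IOException {
      terminated = in.readBoolean();
    }
  }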

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/MultiLayerPerceptron.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/MultiLayerPerceptron.java b/ml/src/main/java/org/apache/hama/ml/perception/MultiLayerPerceptron.java
deleted file mode 100644
index 8901549..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/MultiLayerPerceptron.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hama.commons.math.DoubleDoubleFunction;
-import org.apache.hama.commons.math.DoubleFunction;
-import org.apache.hama.commons.math.DoubleVector;
-import org.apache.hama.commons.math.FunctionFactory;
-import org.apache.hama.ml.ann.NeuralNetworkTrainer;
-import org.apache.hama.ml.util.DefaultFeatureTransformer;
-import org.apache.hama.ml.util.FeatureTransformer;
-
-/**
- * MultiLayerPerceptron defines the common behavior of all the concrete perceptrons.
- */
-public abstract class MultiLayerPerceptron {
-
-  /* The trainer for the model */
-  protected NeuralNetworkTrainer trainer;
-  /* The file path that contains the model meta-data */
-  protected String modelPath;
-
-  /* Model meta-data */
-  protected String MLPType;
-  protected double learningRate;
-  protected double regularization;
-  protected double momentum;
-  protected int numberOfLayers;
-  protected String squashingFunctionName;
-  protected String costFunctionName;
-  protected int[] layerSizeArray;
-
-  protected DoubleDoubleFunction costFunction;
-  protected DoubleFunction squashingFunction;
-
-  // transform the original features to new space
-  protected FeatureTransformer featureTransformer;
-
-  /**
-   * Initialize the MLP.
-   * 
-   * @param learningRate A larger learningRate makes the MLP learn more
-   *          aggressively. The learning rate must be positive.
-   * @param regularization Regularization makes the MLP less likely to overfit.
-   *          The regularization weight must lie in [0, 0.5); values that are
-   *          too large hurt precision.
-   * @param momentum Momentum lets the previous weight adjustment influence the
-   *          current one. The momentum weight cannot be negative.
-   * @param squashingFunctionName The name of the squashing function.
-   * @param costFunctionName The name of the cost function.
-   * @param layerSizeArray The number of neurons for each layer. Note that the
-   *          actual size of each layer is one more than the specified size,
-   *          because a bias neuron is added.
-   */
-  public MultiLayerPerceptron(double learningRate, double regularization,
-      double momentum, String squashingFunctionName, String costFunctionName,
-      int[] layerSizeArray) {
-    this.MLPType = getTypeName();
-    if (learningRate <= 0) {
-      throw new IllegalArgumentException("learning rate must be positive.");
-    }
-    this.learningRate = learningRate;
-    if (regularization < 0 || regularization >= 0.5) {
-      throw new IllegalArgumentException(
-          "regularization weight must be in range [0, 0.5).");
-    }
-    this.regularization = regularization;
-    if (momentum < 0) {
-      throw new IllegalArgumentException("momentum weight cannot be negative.");
-    }
-    this.momentum = momentum;
-    this.squashingFunctionName = squashingFunctionName;
-    this.costFunctionName = costFunctionName;
-    this.layerSizeArray = layerSizeArray;
-    this.numberOfLayers = this.layerSizeArray.length;
-
-    this.costFunction = FunctionFactory
-        .createDoubleDoubleFunction(this.costFunctionName);
-    this.squashingFunction = FunctionFactory
-        .createDoubleFunction(this.squashingFunctionName);
-
-    this.featureTransformer = new DefaultFeatureTransformer();
-  }
-
-  /**
-   * Initialize a multi-layer perceptron with an existing model.
-   * 
-   * @param modelPath The location of the existing model meta-data.
-   */
-  public MultiLayerPerceptron(String modelPath) {
-    this.modelPath = modelPath;
-  }
-
-  /**
-   * Train the model with the given data. This method invokes a perceptron
-   * training BSP task to train the model, then writes the trained model to
-   * modelPath.
-   * 
-   * @param dataInputPath The path of the data.
-   * @param trainingParams Extra parameters for training.
-   */
-  public abstract void train(Path dataInputPath,
-      Map<String, String> trainingParams) throws Exception;
-
-  /**
-   * Get the output based on the input instance and the learned model.
-   * 
-   * @param featureVector The feature vector of an instance to feed to the
-   *          perceptron.
-   * @return The output vector.
-   */
-  public DoubleVector output(DoubleVector featureVector) {
-    return this.outputWrapper(this.featureTransformer.transform(featureVector));
-  }
-
-  public abstract DoubleVector outputWrapper(DoubleVector featureVector);
-
-  /**
-   * Use the class name as the type name.
-   */
-  protected abstract String getTypeName();
-
-  /**
-   * Read the model meta-data from the specified location.
-   * 
-   * @throws IOException
-   */
-  protected abstract void readFromModel() throws IOException;
-
-  /**
-   * Write the model data to the specified location.
-   * 
-   * @param modelPath The location in file system to store the model.
-   * @throws IOException
-   */
-  public abstract void writeModelToFile(String modelPath) throws IOException;
-
-  public String getModelPath() {
-    return modelPath;
-  }
-
-  public String getMLPType() {
-    return MLPType;
-  }
-
-  public double getLearningRate() {
-    return learningRate;
-  }
-
-  public double getRegularization() {
-    return regularization;
-  }
-
-  public double getMomentum() {
-    return momentum;
-  }
-
-  public int getNumberOfLayers() {
-    return numberOfLayers;
-  }
-
-  public String getSquashingFunctionName() {
-    return squashingFunctionName;
-  }
-
-  public String getCostFunctionName() {
-    return costFunctionName;
-  }
-
-  public int[] getLayerSizeArray() {
-    return layerSizeArray;
-  }
-
-  /**
-   * Set the feature transformer.
-   * 
-   * @param featureTransformer
-   */
-  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
-    this.featureTransformer = featureTransformer;
-  }
-  
-  public FeatureTransformer getFeatureTransformer() {
-    return this.featureTransformer;
-  }
-
-}
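
Putting the constructor contract together, a sketch of building and querying a concrete perceptron; it assumes SmallMultiLayerPerceptron (also removed in this commit) keeps the base-class constructor signature, and that "Sigmoid" and "SquaredError" are names FunctionFactory accepts:

  // 8 inputs, one hidden layer of 16 neurons, 1 output; bias neurons are added internally
  MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(0.1, 0.01, 0.9,
      "Sigmoid", "SquaredError", new int[] { 8, 16, 1 });

  DoubleVector features = new DenseDoubleVector(new double[] {
      0.35, 0.74, 0.59, 0.35, 0.0, 0.50, 0.23, 0.48 });
  DoubleVector result = mlp.output(features); // transforms the features, then feeds the network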

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/PerceptronTrainer.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/PerceptronTrainer.java b/ml/src/main/java/org/apache/hama/ml/perception/PerceptronTrainer.java
deleted file mode 100644
index 0baf132..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/PerceptronTrainer.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hama.bsp.BSP;
-import org.apache.hama.bsp.BSPPeer;
-import org.apache.hama.bsp.sync.SyncException;
-import org.apache.hama.commons.io.VectorWritable;
-
-/**
- * The trainer that is used to train the perceptron with BSP. The trainer
- * reads the training data and obtains the trained parameters of the model.
- * 
- */
-public abstract class PerceptronTrainer extends
-    BSP<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> {
-
-  protected Configuration conf;
-  protected int maxIteration;
-  protected int batchSize;
-  protected String trainingMode;
-
-  @Override
-  public void setup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-    conf = peer.getConfiguration();
-    trainingMode = conf.get("training.mode");
-    batchSize = conf.getInt("training.batch.size", 100); // mini-batch by
-                                                         // default
-    this.extraSetup(peer);
-  }
-
-  /**
-   * Handle extra setup for sub-classes.
-   * 
-   * @param peer
-   * @throws IOException
-   * @throws SyncException
-   * @throws InterruptedException
-   */
-  protected void extraSetup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException {
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public abstract void bsp(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException, SyncException, InterruptedException;
-
-  @Override
-  public void cleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-
-    this.extraCleanup(peer);
-  }
-
-  /**
-   * Handle extra cleanup for sub-classes.
-   * 
-   * @param peer
-   * @throws IOException
-   * @throws SyncException
-   * @throws InterruptedException
-   */
-  protected void extraCleanup(
-      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
-      throws IOException {
-  }
-
-}
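
Subclasses hook in through extraSetup()/extraCleanup() rather than overriding setup() itself, so the common configuration is always read first. A hypothetical skeleton (the class name and the extra key are illustrative):

  public class MySmallMLPTrainer extends PerceptronTrainer {
    private int convergenceCheck; // hypothetical extra state

    @Override
    protected void extraSetup(
        BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer) {
      convergenceCheck = conf.getInt("convergence.check.interval", 1000); // conf was set by setup()
    }

    @Override
    public void bsp(
        BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
        throws IOException, SyncException, InterruptedException {
      // read a batch, compute weight updates, sync, merge on the master
    }
  }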

http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPMessage.java
----------------------------------------------------------------------
diff --git a/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPMessage.java b/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPMessage.java
deleted file mode 100644
index 5504cf9..0000000
--- a/ml/src/main/java/org/apache/hama/ml/perception/SmallMLPMessage.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.ml.perception;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hama.commons.io.MatrixWritable;
-import org.apache.hama.commons.math.DenseDoubleMatrix;
-
-/**
- * SmallMLPMessage is used to exchange information for the
- * {@link SmallMultiLayerPerceptron}. It sends the whole parameter matrices from
- * one task to another.
- */
-public class SmallMLPMessage extends MLPMessage {
-
-  private int owner; // the ID of the task that creates the message
-  private int numOfUpdatedMatrices;
-  private DenseDoubleMatrix[] weightUpdatedMatrices;
-  private int numOfPrevUpdatedMatrices;
-  private DenseDoubleMatrix[] prevWeightUpdatedMatrices;
-
-  public SmallMLPMessage() {
-    super();
-  }
-  
-  /**
-   * When a slave sends a message to the master, use this constructor.
-   * 
-   * @param owner The owner that creates the message
-   * @param terminated Whether the training is terminated for the owner task
-   * @param weightUpdatedMatrices The weight updates
-   */
-  public SmallMLPMessage(int owner, boolean terminated,
-      DenseDoubleMatrix[] weightUpdatedMatrices) {
-    super(terminated);
-    this.owner = owner;
-    this.weightUpdatedMatrices = weightUpdatedMatrices;
-    this.numOfUpdatedMatrices = this.weightUpdatedMatrices == null ? 0
-        : this.weightUpdatedMatrices.length;
-    this.numOfPrevUpdatedMatrices = 0;
-    this.prevWeightUpdatedMatrices = null;
-  }
-
-  /**
-   * When the master sends a message to a slave, use this constructor.
-   * 
-   * @param owner The owner that creates the message
-   * @param terminated Whether the training is terminated for the owner task
-   * @param weightUpdatedMatrices The weight updates
-   * @param prevWeightUpdatedMatrices The previous weight updates
-   */
-  public SmallMLPMessage(int owner, boolean terminated,
-      DenseDoubleMatrix[] weightUpdatedMatrices,
-      DenseDoubleMatrix[] prevWeightUpdatedMatrices) {
-    this(owner, terminated, weightUpdatedMatrices);
-    this.prevWeightUpdatedMatrices = prevWeightUpdatedMatrices;
-    this.numOfPrevUpdatedMatrices = this.prevWeightUpdatedMatrices == null ? 0
-        : this.prevWeightUpdatedMatrices.length;
-  }
-
-  /**
-   * Get the owner task ID of the message.
-   * 
-   * @return the owner value.
-   */
-  public int getOwner() {
-    return owner;
-  }
-
-  /**
-   * Get the updated weight matrices.
-   * 
-   * @return the array of dense double matrices.
-   */
-  public DenseDoubleMatrix[] getWeightUpdatedMatrices() {
-    return this.weightUpdatedMatrices;
-  }
-
-  public DenseDoubleMatrix[] getPrevWeightsUpdatedMatrices() {
-    return this.prevWeightUpdatedMatrices;
-  }
-
-  @Override
-  public void readFields(DataInput input) throws IOException {
-    this.owner = input.readInt();
-    this.terminated = input.readBoolean();
-    this.numOfUpdatedMatrices = input.readInt();
-    this.weightUpdatedMatrices = new DenseDoubleMatrix[this.numOfUpdatedMatrices];
-    for (int i = 0; i < this.numOfUpdatedMatrices; ++i) {
-      this.weightUpdatedMatrices[i] = (DenseDoubleMatrix) MatrixWritable
-          .read(input);
-    }
-    this.numOfPrevUpdatedMatrices = input.readInt();
-    this.prevWeightUpdatedMatrices = new DenseDoubleMatrix[this.numOfPrevUpdatedMatrices];
-    for (int i = 0; i < this.numOfPrevUpdatedMatrices; ++i) {
-      this.prevWeightUpdatedMatrices[i] = (DenseDoubleMatrix) MatrixWritable
-          .read(input);
-    }
-  }
-
-  @Override
-  public void write(DataOutput output) throws IOException {
-    output.writeInt(this.owner);
-    output.writeBoolean(this.terminated);
-    output.writeInt(this.numOfUpdatedMatrices);
-    for (int i = 0; i < this.numOfUpdatedMatrices; ++i) {
-      MatrixWritable.write(this.weightUpdatedMatrices[i], output);
-    }
-    output.writeInt(this.numOfPrevUpdatedMatrices);
-    for (int i = 0; i < this.numOfPrevUpdatedMatrices; ++i) {
-      MatrixWritable.write(this.prevWeightUpdatedMatrices[i], output);
-    }
-  }
-
-}
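
The two constructors encode the direction of the exchange. A sketch (peer indices and values illustrative):

  DenseDoubleMatrix[] updates = {
      new DenseDoubleMatrix(new double[][] { { 0.1, 0.2 }, { 0.3, 0.4 } }) };
  DenseDoubleMatrix[] prevUpdates = {
      new DenseDoubleMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 0.0 } }) };

  // slave -> master: only the current weight updates travel
  SmallMLPMessage toMaster = new SmallMLPMessage(3, false, updates);

  // master -> slave: current and previous updates travel, so slaves can apply momentum
  SmallMLPMessage toSlave = new SmallMLPMessage(0, false, updates, prevUpdates);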


http://git-wip-us.apache.org/repos/asf/hama/blob/3a3ea7a3/examples/src/test/resources/neuralnets_classification_training.txt
----------------------------------------------------------------------
diff --git a/examples/src/test/resources/neuralnets_classification_training.txt b/examples/src/test/resources/neuralnets_classification_training.txt
deleted file mode 100644
index 405fb69..0000000
--- a/examples/src/test/resources/neuralnets_classification_training.txt
+++ /dev/null
@@ -1,668 +0,0 @@
-0.35294117647058826,0.7437185929648241,0.5901639344262295,0.35353535353535354,0.0,0.5007451564828614,0.23441502988898377,0.48333333333333334,1.0
-0.058823529411764705,0.4271356783919598,0.5409836065573771,0.29292929292929293,0.0,0.3964232488822653,0.11656703672075147,0.16666666666666666,0.0
-0.47058823529411764,0.9195979899497487,0.5245901639344263,0.0,0.0,0.34724292101341286,0.2536293766011956,0.18333333333333332,1.0
-0.058823529411764705,0.4472361809045226,0.5409836065573771,0.23232323232323232,0.1111111111111111,0.41877794336810736,0.038001707941929974,0.0,0.0
-0.0,0.6884422110552764,0.32786885245901637,0.35353535353535354,0.19858156028368795,0.6423248882265277,0.9436379163108454,0.2,1.0
-0.29411764705882354,0.5829145728643216,0.6065573770491803,0.0,0.0,0.3815201192250373,0.052519214346712216,0.15,0.0
-0.17647058823529413,0.39195979899497485,0.4098360655737705,0.32323232323232326,0.10401891252955082,0.4619970193740686,0.07258753202391117,0.08333333333333333,1.0
-0.5882352941176471,0.5778894472361809,0.0,0.0,0.0,0.526080476900149,0.023911187019641334,0.13333333333333333,0.0
-0.11764705882352941,0.9899497487437185,0.5737704918032787,0.45454545454545453,0.6418439716312057,0.4545454545454546,0.034158838599487616,0.5333333333333333,1.0
-0.47058823529411764,0.628140703517588,0.7868852459016393,0.0,0.0,0.0,0.06575576430401367,0.55,1.0
-0.23529411764705882,0.5527638190954773,0.7540983606557377,0.0,0.0,0.5603576751117736,0.04824935952177626,0.15,0.0
-0.5882352941176471,0.8442211055276382,0.6065573770491803,0.0,0.0,0.5663189269746647,0.1959863364645602,0.21666666666666667,1.0
-0.5882352941176471,0.6984924623115578,0.6557377049180327,0.0,0.0,0.40387481371087935,0.5819812126387702,0.6,0.0
-0.058823529411764705,0.949748743718593,0.4918032786885246,0.23232323232323232,1.0,0.4485842026825634,0.13663535439795046,0.6333333333333333,1.0
-0.29411764705882354,0.8341708542713567,0.5901639344262295,0.1919191919191919,0.20685579196217493,0.3845007451564829,0.21733561058923997,0.5,1.0
-0.4117647058823529,0.5025125628140703,0.0,0.0,0.0,0.44709388971684055,0.17335610589239964,0.18333333333333332,1.0
-0.0,0.592964824120603,0.6885245901639344,0.47474747474747475,0.2718676122931442,0.6825633383010432,0.20196413321947054,0.16666666666666666,1.0
-0.4117647058823529,0.5376884422110553,0.6065573770491803,0.0,0.0,0.4411326378539494,0.07514944491887275,0.16666666666666666,1.0
-0.058823529411764705,0.5175879396984925,0.2459016393442623,0.3838383838383838,0.09810874704491726,0.6453055141579732,0.04483347566182749,0.2,0.0
-0.058823529411764705,0.5778894472361809,0.5737704918032787,0.30303030303030304,0.11347517730496454,0.5156482861400895,0.19257045260461145,0.18333333333333332,1.0
-0.17647058823529413,0.6331658291457286,0.7213114754098361,0.41414141414141414,0.2777777777777778,0.5856929955290611,0.2672929120409906,0.1,0.0
-0.47058823529411764,0.49748743718592964,0.6885245901639344,0.0,0.0,0.5275707898658718,0.13236549957301452,0.48333333333333334,0.0
-0.4117647058823529,0.9849246231155779,0.7377049180327869,0.0,0.0,0.5931445603576752,0.159265584970111,0.3333333333333333,1.0
-0.5294117647058824,0.5979899497487438,0.6557377049180327,0.35353535353535354,0.0,0.43219076005961254,0.07899231426131512,0.13333333333333333,1.0
-0.6470588235294118,0.7185929648241206,0.7704918032786885,0.3333333333333333,0.17257683215130024,0.5454545454545455,0.07514944491887275,0.5,1.0
-0.5882352941176471,0.628140703517588,0.5737704918032787,0.26262626262626265,0.1359338061465721,0.4634873323397914,0.05422715627668659,0.3333333333333333,1.0
-0.4117647058823529,0.7386934673366834,0.6229508196721312,0.0,0.0,0.587183308494784,0.07643040136635354,0.36666666666666664,1.0
-0.058823529411764705,0.48743718592964824,0.5409836065573771,0.15151515151515152,0.16548463356973994,0.34575260804769004,0.17463706233988044,0.016666666666666666,0.0
-0.7647058823529411,0.7286432160804021,0.6721311475409836,0.1919191919191919,0.13002364066193853,0.330849478390462,0.07130657557643039,0.6,0.0
-0.29411764705882354,0.5879396984924623,0.7540983606557377,0.0,0.0,0.5081967213114754,0.11058923996584116,0.2833333333333333,0.0
-0.29411764705882354,0.5477386934673367,0.6147540983606558,0.26262626262626265,0.0,0.5365126676602087,0.19982920580700256,0.65,0.0
-0.17647058823529413,0.7939698492462312,0.6229508196721312,0.36363636363636365,0.2895981087470449,0.4709388971684054,0.3300597779675491,0.11666666666666667,1.0
-0.17647058823529413,0.44221105527638194,0.47540983606557374,0.1111111111111111,0.06382978723404255,0.3695976154992549,0.0807002561912895,0.016666666666666666,0.0
-0.35294117647058826,0.4623115577889447,0.7540983606557377,0.0,0.0,0.2965722801788376,0.04696840307429547,0.11666666666666667,0.0
-0.5882352941176471,0.6130653266331658,0.639344262295082,0.31313131313131315,0.0,0.41132637853949333,0.1853116994022203,0.4,0.0
-0.23529411764705882,0.5175879396984925,0.4918032786885246,0.3333333333333333,0.22695035460992907,0.35767511177347244,0.37916310845431256,0.2,0.0
-0.6470588235294118,0.6934673366834171,0.6229508196721312,0.0,0.0,0.4947839046199703,0.14602903501280956,0.23333333333333334,0.0
-0.5294117647058824,0.5125628140703518,0.6229508196721312,0.37373737373737376,0.0,0.4903129657228018,0.2506404782237404,0.4166666666666667,1.0
-0.11764705882352941,0.45226130653266333,0.5573770491803278,0.42424242424242425,0.0,0.5692995529061103,0.18146883005977796,0.1,1.0
-0.23529411764705882,0.5577889447236181,0.5901639344262295,0.47474747474747475,0.24468085106382978,0.5529061102831595,0.5602049530315968,0.5833333333333334,1.0
-0.17647058823529413,0.9045226130653267,0.5245901639344263,0.25252525252525254,0.08274231678486997,0.5067064083457526,0.08240819812126388,0.08333333333333333,0.0
-0.4117647058823529,0.6683417085427136,0.6885245901639344,0.0,0.0,0.5991058122205664,0.2638770281810418,0.26666666666666666,0.0
-0.4117647058823529,0.5326633165829145,0.7540983606557377,0.18181818181818182,0.0,0.338301043219076,0.06703672075149443,0.45,0.0
-0.5294117647058824,0.8592964824120602,0.9016393442622951,0.24242424242424243,0.28368794326241137,0.676602086438152,0.2745516652433817,0.55,1.0
-0.4117647058823529,0.7989949748743719,0.5245901639344263,0.0,0.0,0.40834575260804773,0.09222886421861655,0.31666666666666665,0.0
-0.0,0.9045226130653267,0.5409836065573771,0.3939393939393939,0.0,0.6259314456035768,0.7749786507258752,0.06666666666666667,1.0
-0.058823529411764705,0.7336683417085427,0.45901639344262296,0.0,0.0,0.4426229508196722,0.20751494449188723,0.13333333333333333,0.0
-0.11764705882352941,0.35678391959798994,0.5737704918032787,0.2727272727272727,0.0,0.41728763040238454,0.21690862510674636,0.016666666666666666,0.0
-0.4117647058823529,0.5175879396984925,0.5409836065573771,0.32323232323232326,0.0,0.5827123695976155,0.1135781383432963,0.16666666666666666,1.0
-0.4117647058823529,0.5276381909547738,0.0,0.0,0.0,0.0,0.0969257045260461,0.05,0.0
-0.058823529411764705,0.5175879396984925,0.6557377049180327,0.1111111111111111,0.09692671394799054,0.28912071535022354,0.1763450042698548,0.016666666666666666,0.0
-0.058823529411764705,0.507537688442211,0.4098360655737705,0.15151515151515152,0.0425531914893617,0.36065573770491804,0.19128949615713065,0.08333333333333333,0.0
-0.29411764705882354,0.44221105527638194,0.5409836065573771,0.21212121212121213,0.027186761229314422,0.36363636363636365,0.11272416737830913,0.15,0.0
-0.47058823529411764,0.8844221105527639,0.7377049180327869,0.3434343434343434,0.3546099290780142,0.5022354694485843,0.16609735269000853,0.6166666666666667,1.0
-0.4117647058823529,0.7537688442211056,0.5409836065573771,0.42424242424242425,0.40425531914893614,0.5171385991058123,0.27327070879590093,0.35,0.0
-0.058823529411764705,0.36683417085427134,0.4098360655737705,0.10101010101010101,0.0,0.34277198211624443,0.07258753202391117,0.0,0.0
-0.4117647058823529,0.9396984924623115,0.5573770491803278,0.3939393939393939,0.35933806146572106,0.5618479880774964,0.07514944491887275,0.3333333333333333,1.0
-0.0,0.5025125628140703,0.7213114754098361,0.6060606060606061,0.13002364066193853,0.6974664679582713,0.37745516652433814,0.16666666666666666,0.0
-0.0,0.7336683417085427,0.6721311475409836,0.0,0.0,0.6035767511177348,0.7271562766865926,0.38333333333333336,0.0
-0.0,0.5276381909547738,0.5245901639344263,0.41414141414141414,0.16784869976359337,0.6184798807749627,0.04056362083689154,0.016666666666666666,0.0
-0.11764705882352941,0.4221105527638191,0.0,0.0,0.0,0.0,0.0964987190435525,0.0,0.0
-0.47058823529411764,0.6683417085427136,0.5901639344262295,0.0,0.0,0.4903129657228018,0.08198121263877028,0.3,1.0
-0.29411764705882354,0.22110552763819097,0.5081967213114754,0.0,0.0,0.37257824143070045,0.21733561058923997,0.25,0.0
-0.11764705882352941,0.7085427135678392,0.47540983606557374,0.3434343434343434,0.15130023640661938,0.37853949329359166,0.26515798462852264,0.05,0.0
-0.4117647058823529,0.5728643216080402,0.5409836065573771,0.0,0.0,0.488822652757079,0.07685738684884713,0.35,1.0
-0.29411764705882354,0.49748743718592964,0.6065573770491803,0.2727272727272727,0.0,0.43219076005961254,0.0533731853116994,0.18333333333333332,0.0
-0.0,0.5477386934673367,0.7213114754098361,0.30303030303030304,0.0,0.4843517138599106,0.33176771989752346,0.2833333333333333,1.0
-0.11764705882352941,0.5477386934673367,0.7540983606557377,0.0,0.0,0.6363636363636365,0.32749786507258755,0.55,0.0
-0.058823529411764705,0.47738693467336685,0.5409836065573771,0.13131313131313133,0.04491725768321513,0.2921013412816692,0.10930828351836037,0.06666666666666667,0.0
-0.23529411764705882,0.7336683417085427,0.6967213114754098,0.2727272727272727,0.1182033096926714,0.4307004470938897,0.04739538855678907,0.1,0.0
-0.11764705882352941,0.5025125628140703,0.5409836065573771,0.20202020202020202,0.10638297872340426,0.4903129657228018,0.3368915456874466,0.11666666666666667,1.0
-0.29411764705882354,0.6984924623115578,0.5245901639344263,0.35353535353535354,0.16548463356973994,0.4262295081967214,0.14218616567036718,0.08333333333333333,0.0
-0.7647058823529411,0.6331658291457286,0.7377049180327869,0.0,0.0,0.646795827123696,0.21562766865926558,0.35,1.0
-0.23529411764705882,0.6482412060301508,0.7049180327868853,0.20202020202020202,0.3191489361702128,0.5230998509687035,0.06532877882152008,0.03333333333333333,0.0
-0.058823529411764705,0.3969849246231156,0.6147540983606558,0.30303030303030304,0.0,0.4769001490312966,0.13578138343296328,0.016666666666666666,0.0
-0.058823529411764705,0.0,0.39344262295081966,0.20202020202020202,0.0,0.3681073025335321,0.026473099914602907,0.016666666666666666,0.0
-0.4117647058823529,0.31155778894472363,0.639344262295082,0.0,0.0,0.48584202682563343,0.1336464560204953,0.3333333333333333,0.0
-0.29411764705882354,0.47738693467336685,0.5901639344262295,0.3333333333333333,0.0,0.5618479880774964,0.12467976088812979,0.1,0.0
-0.0,0.6582914572864321,0.0,0.0,0.0,0.6438152011922504,0.08198121263877028,0.08333333333333333,1.0
-0.11764705882352941,0.5628140703517588,0.5409836065573771,0.2222222222222222,0.0,0.37257824143070045,0.0977796754910333,0.05,0.0
-0.17647058823529413,0.5678391959798995,0.36065573770491804,0.13131313131313133,0.0,0.33383010432190763,0.026473099914602907,0.016666666666666666,0.0
-0.11764705882352941,0.37185929648241206,0.0,0.0,0.0,0.0,0.010247651579846282,0.016666666666666666,0.0
-0.4117647058823529,0.41708542713567837,0.639344262295082,0.26262626262626265,0.08392434988179669,0.436661698956781,0.29419299743808713,0.25,0.0
-0.0,0.507537688442211,0.5327868852459017,0.2828282828282828,0.0,0.3666169895678093,0.06789069171648163,0.016666666666666666,0.0
-0.29411764705882354,0.6884422110552764,0.8852459016393442,0.0,0.0,0.7272727272727273,0.0636208368915457,0.26666666666666666,1.0
-0.11764705882352941,0.5527638190954773,0.6065573770491803,0.29292929292929293,0.14775413711583923,0.4828614008941878,0.26473099914602904,0.1,0.0
-0.7647058823529411,0.5326633165829145,0.5901639344262295,0.5454545454545454,0.0,0.5454545454545455,0.042698548249359515,0.4,0.0
-0.11764705882352941,0.5025125628140703,0.5573770491803278,0.25252525252525254,0.08392434988179669,0.5737704918032788,0.10503842869342442,0.08333333333333333,0.0
-0.8823529411764706,0.6834170854271356,0.5737704918032787,0.32323232323232326,0.13002364066193853,0.5529061102831595,0.03202391118701964,0.36666666666666664,1.0
-0.058823529411764705,0.5376884422110553,0.5573770491803278,0.1919191919191919,0.0,0.3949329359165425,0.037147736976942784,0.05,0.0
-0.058823529411764705,0.4020100502512563,0.45081967213114754,0.0,0.0,0.28464977645305517,0.07685738684884713,0.0,0.0
-0.23529411764705882,0.6180904522613065,0.6557377049180327,0.15151515151515152,0.20803782505910165,0.4769001490312966,0.15584970111016225,0.21666666666666667,0.0
-0.4117647058823529,0.40703517587939697,0.639344262295082,0.40404040404040403,0.05673758865248227,0.6959761549925485,0.07813834329632792,0.35,0.0
-0.23529411764705882,0.6733668341708543,0.5901639344262295,0.0,0.0,0.35469448584202684,0.08497011101622545,0.65,1.0
-0.11764705882352941,0.7135678391959799,0.6721311475409836,0.18181818181818182,0.07565011820330969,0.3681073025335321,0.29163108454312553,0.0,0.0
-0.35294117647058826,0.7236180904522613,0.5901639344262295,0.2727272727272727,0.2695035460992908,0.5052160953800299,0.07557643040136634,0.31666666666666665,0.0
-0.11764705882352941,0.4623115577889447,0.5081967213114754,0.2828282828282828,0.0,0.4709388971684054,0.02220324508966695,0.05,0.0
-0.058823529411764705,0.35678391959798994,0.39344262295081966,0.18181818181818182,0.08983451536643026,0.30402384500745155,0.10461144321093083,0.016666666666666666,0.0
-0.35294117647058826,0.46733668341708545,0.4098360655737705,0.30303030303030304,0.07565011820330969,0.4277198211624441,0.11870196413321946,0.03333333333333333,0.0
-0.058823529411764705,0.6130653266331658,0.7377049180327869,0.5151515151515151,0.26004728132387706,0.7406855439642326,0.10546541417591801,0.16666666666666666,1.0
-0.058823529411764705,0.8190954773869347,0.5901639344262295,0.0,0.0,0.5812220566318927,0.48847139197267286,0.2,1.0
-0.058823529411764705,0.7587939698492462,0.4918032786885246,0.0,0.0,0.38897168405365135,0.04312553373185311,0.016666666666666666,0.0
-0.0,0.628140703517588,0.7868852459016393,0.0,0.0,0.33532041728763046,0.07856532877882151,0.0,0.0
-0.058823529411764705,0.40703517587939697,0.5901639344262295,0.18181818181818182,0.04728132387706856,0.3964232488822653,0.087532023911187,0.05,0.0
-0.11764705882352941,0.4271356783919598,0.5327868852459017,0.0,0.0,0.5901639344262296,0.36379163108454315,0.1,0.0
-0.058823529411764705,0.6331658291457286,0.45901639344262296,0.29292929292929293,0.17966903073286053,0.4277198211624441,0.30871050384286935,0.0,0.0
-0.058823529411764705,0.4824120603015075,1.0,0.0,0.0,0.33383010432190763,0.055081127241673786,0.1,0.0
-0.23529411764705882,0.7236180904522613,0.47540983606557374,0.2828282828282828,0.16548463356973994,0.4396423248882266,0.08923996584116138,0.26666666666666666,0.0
-0.17647058823529413,0.41708542713567837,0.47540983606557374,0.31313131313131315,0.02127659574468085,0.511177347242921,0.11016225448334757,0.06666666666666667,0.0
-0.0,0.47738693467336685,0.6967213114754098,0.25252525252525254,0.0425531914893617,0.5573770491803279,0.07216054654141758,0.05,1.0
-0.17647058823529413,0.8592964824120602,0.5901639344262295,0.3333333333333333,0.1595744680851064,0.496274217585693,0.051665243381725026,0.05,1.0
-0.47058823529411764,0.7788944723618091,0.5081967213114754,0.26262626262626265,0.5851063829787234,0.5067064083457526,0.19854824935952178,0.4166666666666667,1.0
-0.058823529411764705,0.4472361809045226,0.6229508196721312,0.3434343434343434,0.04373522458628842,0.46497764530551416,0.04867634500426986,0.03333333333333333,0.0
-0.23529411764705882,0.38190954773869346,0.5081967213114754,0.0,0.0,0.5067064083457526,0.1336464560204953,0.06666666666666667,0.0
-0.4117647058823529,0.8040201005025126,0.4426229508196721,0.32323232323232326,0.20685579196217493,0.4545454545454546,0.21776259607173357,0.3,1.0
-0.23529411764705882,0.7336683417085427,0.7540983606557377,0.0,0.0,0.46497764530551416,0.1968403074295474,0.6666666666666666,1.0
-0.29411764705882354,0.6231155778894473,0.6065573770491803,0.0,0.0,0.5067064083457526,0.06063193851409052,0.2833333333333333,1.0
-0.29411764705882354,0.39195979899497485,0.39344262295081966,0.0,0.0,0.5022354694485843,0.24594363791631085,0.06666666666666667,0.0
-0.23529411764705882,0.48743718592964824,0.4918032786885246,0.23232323232323232,0.0,0.42026825633383014,0.15584970111016225,0.016666666666666666,0.0
-0.23529411764705882,0.49748743718592964,0.6229508196721312,0.15151515151515152,0.06028368794326241,0.34575260804769004,0.06191289496157131,0.0,0.0
-0.0,0.8140703517587939,0.6229508196721312,0.5656565656565656,0.1182033096926714,0.7928464977645306,0.29077711357813835,0.06666666666666667,1.0
-0.35294117647058826,0.5577889447236181,0.5245901639344263,0.3939393939393939,0.0,0.5096870342771983,0.07771135781383433,0.05,0.0
-0.11764705882352941,0.5376884422110553,0.6065573770491803,0.30303030303030304,0.1182033096926714,0.5007451564828614,0.13919726729291204,0.03333333333333333,0.0
-0.29411764705882354,0.6633165829145728,0.6557377049180327,0.0,0.0,0.3994038748137109,0.04611443210930828,0.8,0.0
-0.0,0.5678391959798995,0.6229508196721312,0.0,0.0,0.496274217585693,0.08539709649871904,0.03333333333333333,1.0
-0.058823529411764705,0.44221105527638194,0.2459016393442623,0.42424242424242425,0.11702127659574468,0.819672131147541,0.1784799316823228,0.08333333333333333,1.0
-0.17647058823529413,0.6030150753768844,0.5737704918032787,0.30303030303030304,0.1595744680851064,0.639344262295082,0.1596925704526046,0.15,0.0
-0.058823529411764705,0.592964824120603,0.47540983606557374,0.36363636363636365,0.1111111111111111,0.496274217585693,0.07813834329632792,0.03333333333333333,0.0
-0.058823529411764705,0.5879396984924623,0.7213114754098361,0.24242424242424243,0.17139479905437352,0.5141579731743666,0.13877028181041845,0.31666666666666665,1.0
-0.0,0.5276381909547738,0.6885245901639344,0.0,0.0,0.4157973174366617,0.28309137489325364,0.6833333333333333,1.0
-0.23529411764705882,0.8693467336683417,0.5737704918032787,0.1414141414141414,0.19858156028368795,0.4426229508196722,0.12083689154568743,0.2,1.0
-0.5294117647058824,0.6130653266331658,0.45901639344262296,0.0,0.0,0.496274217585693,0.44235695986336465,0.2,1.0
-0.17647058823529413,0.8542713567839196,0.5245901639344263,0.37373737373737376,0.26595744680851063,0.5141579731743666,0.11870196413321946,0.15,1.0
-0.47058823529411764,0.4221105527638191,0.6065573770491803,0.31313131313131315,0.0,0.5707898658718331,0.16182749786507258,0.3,0.0
-0.11764705882352941,0.4824120603015075,0.5573770491803278,0.13131313131313133,0.057919621749408984,0.31445603576751124,0.24295473953885569,0.08333333333333333,0.0
-0.11764705882352941,0.628140703517588,0.4918032786885246,0.20202020202020202,0.16548463356973994,0.503725782414307,0.00426985482493595,0.16666666666666666,0.0
-0.0,0.5025125628140703,0.5737704918032787,0.26262626262626265,0.0591016548463357,0.459016393442623,0.22160546541417592,0.0,0.0
-0.0,0.46733668341708545,0.4918032786885246,0.25252525252525254,0.10874704491725769,0.4277198211624441,0.19385140905209222,0.016666666666666666,0.0
-0.0,0.6482412060301508,0.6557377049180327,0.0,0.0,0.46497764530551416,0.266865926558497,0.13333333333333333,0.0
-0.29411764705882354,0.5276381909547738,0.5901639344262295,0.29292929292929293,0.38416075650118203,0.5499254843517138,0.034585824081981215,0.11666666666666667,0.0
-0.17647058823529413,0.6432160804020101,0.639344262295082,0.0,0.0,0.31445603576751124,0.08112724167378309,0.5666666666666667,0.0
-0.29411764705882354,0.5326633165829145,0.6721311475409836,0.30303030303030304,0.0,0.5886736214605067,0.08881298035866779,0.2833333333333333,0.0
-0.11764705882352941,0.542713567839196,0.4262295081967213,0.26262626262626265,0.07446808510638298,0.4843517138599106,0.10247651579846284,0.016666666666666666,0.0
-0.5882352941176471,0.542713567839196,0.5409836065573771,0.0,0.0,0.4828614008941878,0.08283518360375747,0.35,1.0
-0.23529411764705882,0.7738693467336684,0.5081967213114754,0.31313131313131315,0.33569739952718675,0.488822652757079,0.06789069171648163,0.03333333333333333,0.0
-0.0,0.5125628140703518,0.6147540983606558,0.23232323232323232,0.0,0.0,0.210930828351836,0.0,0.0
-0.5294117647058824,0.2864321608040201,0.6557377049180327,0.37373737373737376,0.0,0.488822652757079,0.007685738684884714,0.3333333333333333,0.0
-0.11764705882352941,0.5326633165829145,0.5245901639344263,0.35353535353535354,0.14066193853427897,0.4545454545454546,0.5644748078565328,0.21666666666666667,0.0
-0.29411764705882354,0.7386934673366834,0.639344262295082,0.0,0.0,0.5022354694485843,0.059777967549103334,0.7333333333333333,0.0
-0.11764705882352941,0.45226130653266333,0.5737704918032787,0.1717171717171717,0.0,0.40685543964232496,0.002988898377455169,0.016666666666666666,0.0
-0.058823529411764705,0.6834170854271356,0.6065573770491803,0.5050505050505051,0.24113475177304963,0.5573770491803279,0.13706233988044406,0.05,0.0
-0.23529411764705882,0.5728643216080402,0.5327868852459017,0.0,0.0,0.3263785394932936,0.15115286080273269,0.26666666666666666,0.0
-0.5294117647058824,0.7839195979899497,0.7049180327868853,0.2828282828282828,0.18321513002364065,0.511177347242921,0.4743808710503843,0.35,1.0
-0.058823529411764705,0.7688442211055276,0.6721311475409836,0.42424242424242425,0.5732860520094563,0.6050670640834576,0.26003415883859954,0.03333333333333333,0.0
-0.47058823529411764,0.9447236180904522,0.639344262295082,0.0,0.0,0.7138599105812221,0.025192143467122122,0.36666666666666664,1.0
-0.4117647058823529,0.7638190954773869,0.7213114754098361,0.4444444444444444,0.0,0.7451564828614009,0.11058923996584116,0.25,1.0
-0.11764705882352941,0.49748743718592964,0.4262295081967213,0.15151515151515152,0.1111111111111111,0.3666169895678093,0.23868488471391974,0.0,0.0
-0.058823529411764705,0.5477386934673367,0.45901639344262296,0.21212121212121213,0.1595744680851064,0.37555886736214605,0.3223740392826644,0.03333333333333333,0.0
-0.11764705882352941,0.44221105527638194,0.6065573770491803,0.1919191919191919,0.06264775413711583,0.43219076005961254,0.06447480785653288,0.016666666666666666,0.0
-1.0,0.8190954773869347,0.5901639344262295,0.41414141414141414,0.1347517730496454,0.609538002980626,0.3155422715627669,0.43333333333333335,1.0
-0.23529411764705882,0.7587939698492462,0.7377049180327869,0.3838383838383838,0.0,0.4426229508196722,0.09222886421861655,0.25,0.0
-0.4117647058823529,0.5125628140703518,0.6065573770491803,0.40404040404040403,0.12411347517730496,0.5543964232488824,0.053800170794193,0.4,0.0
-0.0,0.5728643216080402,0.6557377049180327,0.3434343434343434,0.33687943262411346,0.6587183308494785,0.038001707941929974,0.1,0.0
-0.11764705882352941,0.5025125628140703,0.5245901639344263,0.23232323232323232,0.0,0.4426229508196722,0.1238257899231426,0.0,0.0
-0.0,0.6582914572864321,0.7213114754098361,0.0,0.0,0.4709388971684054,0.2839453458582408,0.18333333333333332,1.0
-0.35294117647058826,0.5226130653266332,0.6065573770491803,0.18181818181818182,0.18439716312056736,0.4456035767511177,0.2749786507258753,0.3333333333333333,1.0
-0.17647058823529413,0.7437185929648241,0.5409836065573771,0.25252525252525254,0.0,0.4843517138599106,0.07600341588385995,0.016666666666666666,0.0
-0.23529411764705882,0.6030150753768844,0.5573770491803278,0.0,0.0,0.4411326378539494,0.26942783945345855,0.21666666666666667,0.0
-0.23529411764705882,0.5527638190954773,0.5409836065573771,0.0,0.0,0.4754098360655738,0.1678052946199829,0.13333333333333333,0.0
-0.17647058823529413,0.5577889447236181,0.7377049180327869,0.12121212121212122,0.09219858156028368,0.42324888226527574,0.1780529461998292,0.13333333333333333,0.0
-0.35294117647058826,0.5125628140703518,0.6721311475409836,0.0,0.0,0.459016393442623,0.043552519214346705,0.25,1.0
-0.35294117647058826,0.6733668341708543,0.5737704918032787,0.23232323232323232,0.1536643026004728,0.5275707898658718,0.1981212638770282,0.13333333333333333,1.0
-0.11764705882352941,0.4371859296482412,0.0,0.23232323232323232,0.0,0.4307004470938897,0.2967549103330487,0.06666666666666667,0.0
-0.058823529411764705,0.3969849246231156,0.4918032786885246,0.42424242424242425,0.05673758865248227,0.6482861400894189,0.25619128949615716,0.03333333333333333,0.0
-0.11764705882352941,0.3768844221105528,0.5245901639344263,0.24242424242424243,0.06501182033096926,0.4426229508196722,0.12467976088812979,0.2,0.0
-0.47058823529411764,0.8994974874371859,0.5901639344262295,0.42424242424242425,0.1536643026004728,0.48733233979135626,0.2736976942783945,0.25,1.0
-0.35294117647058826,0.4271356783919598,0.639344262295082,0.0,0.0,0.46497764530551416,0.12980358667805295,0.35,0.0
-0.0,0.6482412060301508,0.9016393442622951,0.46464646464646464,0.1536643026004728,1.0,0.10290350128095645,0.08333333333333333,1.0
-0.29411764705882354,0.7185929648241206,0.639344262295082,0.0,0.0,0.6706408345752609,0.04782237403928266,0.43333333333333335,0.0
-0.29411764705882354,0.6532663316582915,0.6721311475409836,0.0,0.0,0.5827123695976155,0.3748932536293766,0.26666666666666666,1.0
-0.35294117647058826,0.4371859296482412,0.6557377049180327,0.0,0.0,0.34575260804769004,0.0025619128949615736,0.18333333333333332,0.0
-0.0,0.5979899497487438,0.5245901639344263,0.18181818181818182,0.10874704491725769,0.5201192250372578,0.2762596071733561,0.03333333333333333,0.0
-0.058823529411764705,0.0,0.6065573770491803,0.20202020202020202,0.027186761229314422,0.4128166915052161,0.09436379163108453,0.0,0.0
-0.29411764705882354,0.36683417085427134,0.4918032786885246,0.0,0.0,0.3994038748137109,0.08112724167378309,0.1,0.0
-0.23529411764705882,0.7085427135678392,0.6065573770491803,0.0,0.0,0.41132637853949333,0.0708795900939368,0.31666666666666665,0.0
-0.4117647058823529,0.9748743718592965,0.5573770491803278,0.2828282828282828,0.0,0.5350223546944859,0.284799316823228,0.3333333333333333,1.0
-0.47058823529411764,0.9095477386934674,0.5573770491803278,0.36363636363636365,0.5851063829787234,0.4485842026825634,0.22929120409906065,0.65,1.0
-0.058823529411764705,0.6432160804020101,0.8032786885245902,0.41414141414141414,0.06855791962174941,0.4769001490312966,0.5307429547395388,0.2,1.0
-0.47058823529411764,0.5477386934673367,0.6229508196721312,0.3939393939393939,0.1347517730496454,0.4157973174366617,0.23996584116140052,0.16666666666666666,1.0
-0.29411764705882354,0.6984924623115578,0.6557377049180327,0.35353535353535354,0.18912529550827423,0.4709388971684054,0.12083689154568743,0.06666666666666667,1.0
-0.17647058823529413,0.5577889447236181,0.5081967213114754,0.0,0.0,0.33681073025335323,0.027327070879590087,0.0,0.0
-0.5294117647058824,0.6180904522613065,0.5737704918032787,0.4444444444444444,0.1111111111111111,0.49329359165424747,0.1263877028181042,0.31666666666666665,0.0
-0.4117647058823529,0.7989949748743719,0.5409836065573771,0.0,0.0,0.45305514157973176,0.13023057216054654,0.25,1.0
-0.6470588235294118,0.678391959798995,0.0,0.0,0.0,0.7794336810730254,0.21349274124679757,0.31666666666666665,1.0
-0.47058823529411764,0.4271356783919598,0.45081967213114754,0.20202020202020202,0.0,0.36363636363636365,0.024765157984628527,0.35,0.0
-0.29411764705882354,0.7939698492462312,0.6885245901639344,0.41414141414141414,0.24822695035460993,0.587183308494784,0.1353543979504697,0.13333333333333333,1.0
-0.058823529411764705,0.5276381909547738,0.47540983606557374,0.0,0.0,0.3621460506706409,0.04654141759180188,0.0,0.0
-0.17647058823529413,0.5376884422110553,0.5081967213114754,0.13131313131313133,0.05673758865248227,0.3412816691505216,0.25619128949615716,0.03333333333333333,1.0
-0.23529411764705882,0.5477386934673367,0.5245901639344263,0.4444444444444444,0.11702127659574468,0.518628912071535,0.35311699402220326,0.08333333333333333,1.0
-0.23529411764705882,0.7437185929648241,0.4918032786885246,0.2727272727272727,0.375886524822695,0.4605067064083458,0.030742954739538853,0.13333333333333333,1.0
-0.0,0.5678391959798995,0.6557377049180327,0.16161616161616163,0.0,0.4619970193740686,0.3398804440649018,0.0,0.0
-0.058823529411764705,0.6934673366834171,0.6721311475409836,0.0,0.0,0.5976154992548436,0.06746370623398804,0.11666666666666667,0.0
-0.0,0.542713567839196,0.5573770491803278,0.20202020202020202,0.0,0.40685543964232496,0.302732707087959,0.18333333333333332,0.0
-0.11764705882352941,0.49748743718592964,0.5737704918032787,0.16161616161616163,0.05200945626477541,0.30402384500745155,0.06703672075149443,0.1,0.0
-0.35294117647058826,0.5175879396984925,0.5901639344262295,0.32323232323232326,0.22458628841607564,0.5618479880774964,0.10503842869342442,0.5666666666666667,0.0
-0.29411764705882354,0.5577889447236181,0.5901639344262295,0.2828282828282828,0.0,0.3561847988077496,0.14047822374039282,0.1,0.0
-0.47058823529411764,0.9849246231155779,0.6229508196721312,0.29292929292929293,0.3309692671394799,0.5588673621460507,0.22502134927412468,0.6,1.0
-0.29411764705882354,0.8140703517587939,0.8524590163934426,0.0,0.0,0.5618479880774964,0.031169940222032448,0.5166666666666667,1.0
-0.058823529411764705,0.4824120603015075,0.5245901639344263,0.2727272727272727,0.10283687943262411,0.4947839046199703,0.09009393680614858,0.0,0.0
-0.4117647058823529,0.9246231155778895,0.6885245901639344,0.3333333333333333,0.0,0.5290611028315947,0.11827497865072585,0.3333333333333333,1.0
-0.11764705882352941,0.40703517587939697,0.4918032786885246,0.2222222222222222,0.0,0.4128166915052161,0.09052092228864217,0.06666666666666667,0.0
-0.0,0.7386934673366834,0.6967213114754098,0.5454545454545454,0.0,0.6378539493293591,0.12681468830059778,0.05,0.0
-0.4117647058823529,0.8994974874371859,0.7786885245901639,0.31313131313131315,0.0,0.5096870342771983,0.03672075149444919,0.65,0.0
-0.0,0.7035175879396985,0.5327868852459017,0.26262626262626265,0.1536643026004728,0.6348733233979137,0.1507258753202391,0.05,1.0
-0.5294117647058824,0.5628140703517588,0.6721311475409836,0.32323232323232326,0.20685579196217493,0.5096870342771983,0.07771135781383433,0.25,1.0
-0.7058823529411765,0.7587939698492462,0.5737704918032787,0.40404040404040403,0.3203309692671395,0.6229508196721312,0.28351836037574724,0.2833333333333333,1.0
-0.29411764705882354,0.5477386934673367,0.5081967213114754,0.41414141414141414,0.1524822695035461,0.533532041728763,0.18616567036720752,0.06666666666666667,1.0
-0.35294117647058826,0.628140703517588,0.5573770491803278,0.30303030303030304,0.14184397163120568,0.44709388971684055,0.16481639624252775,0.18333333333333332,0.0
-0.29411764705882354,0.4271356783919598,0.6065573770491803,0.2222222222222222,0.0,0.43219076005961254,0.48932536293766005,0.18333333333333332,1.0
-0.29411764705882354,0.5628140703517588,0.5409836065573771,0.0,0.0,0.5633383010432191,0.07813834329632792,0.3333333333333333,1.0
-0.0,0.8894472361809045,0.4918032786885246,0.29292929292929293,0.5650118203309693,0.5156482861400895,0.4244235695986337,0.0,1.0
-0.11764705882352941,0.7939698492462312,0.7377049180327869,0.0,0.0,0.4709388971684054,0.3104184457728438,0.75,1.0
-0.4117647058823529,0.5979899497487438,0.0,0.0,0.0,0.37555886736214605,0.055935098206660976,0.26666666666666666,0.0
-0.4117647058823529,0.7135678391959799,0.4918032786885246,0.3333333333333333,0.22458628841607564,0.42921013412816694,0.26003415883859954,0.6666666666666666,0.0
-0.058823529411764705,0.5025125628140703,0.5409836065573771,0.15151515151515152,0.06619385342789598,0.3517138599105813,0.251067463706234,0.08333333333333333,0.0
-0.058823529411764705,0.4371859296482412,0.639344262295082,0.2727272727272727,0.037825059101654845,0.5156482861400895,0.009820666097352692,0.016666666666666666,0.0
-0.0,0.507537688442211,0.6229508196721312,0.0,0.0,0.5320417287630403,0.05123825789923143,0.08333333333333333,0.0
-0.17647058823529413,0.8140703517587939,0.4262295081967213,0.3838383838383838,0.0,0.5543964232488824,0.24508966695132367,0.05,1.0
-0.23529411764705882,0.9899497487437185,0.5737704918032787,0.3939393939393939,0.8794326241134752,0.5469448584202683,0.9611443210930829,0.16666666666666666,0.0
-0.0,0.5879396984924623,0.6557377049180327,0.31313131313131315,0.06264775413711583,0.6736214605067065,0.004696840307429545,0.05,0.0
-0.23529411764705882,0.7135678391959799,0.7049180327868853,0.0,0.0,0.6557377049180328,0.2421007685738685,0.016666666666666666,1.0
-0.35294117647058826,0.6733668341708543,0.6557377049180327,0.37373737373737376,0.4373522458628842,0.6885245901639345,0.06831767719897522,0.4166666666666667,1.0
-0.058823529411764705,0.3969849246231156,0.6557377049180327,0.25252525252525254,0.04373522458628842,0.37853949329359166,0.21562766865926558,0.016666666666666666,0.0
-0.23529411764705882,0.6130653266331658,0.5573770491803278,0.0,0.0,0.5216095380029807,0.1349274124679761,0.13333333333333333,0.0
-0.17647058823529413,0.37185929648241206,0.5573770491803278,0.2828282828282828,0.05319148936170213,0.4426229508196722,0.09180187873612296,0.03333333333333333,0.0
-0.23529411764705882,0.8592964824120602,0.5901639344262295,0.0,0.0,0.6497764530551416,0.17122117847993165,0.08333333333333333,1.0
-0.4117647058823529,0.9095477386934674,0.6885245901639344,0.21212121212121213,0.22695035460992907,0.5350223546944859,0.21690862510674636,0.5,1.0
-0.0,0.8994974874371859,0.7377049180327869,0.2727272727272727,0.0,0.6572280178837556,0.25960717335610595,0.03333333333333333,1.0
-0.5294117647058824,0.8241206030150754,0.6885245901639344,0.21212121212121213,0.0,0.459016393442623,0.3215200683176772,0.18333333333333332,1.0
-0.0,0.5226130653266332,0.6229508196721312,0.0,0.0,0.27421758569299554,0.215200683176772,0.1,0.0
-0.058823529411764705,0.457286432160804,0.5245901639344263,0.24242424242424243,0.0,0.43517138599105815,0.04867634500426986,0.0,0.0
-0.23529411764705882,0.457286432160804,0.5737704918032787,0.32323232323232326,0.10401891252955082,0.49329359165424747,0.15713065755764302,0.016666666666666666,0.0
-0.17647058823529413,0.6984924623115578,0.4426229508196721,0.0,0.0,0.3815201192250373,0.13834329632792486,0.016666666666666666,1.0
-0.35294117647058826,0.5979899497487438,0.4098360655737705,0.2222222222222222,0.20803782505910165,0.40387481371087935,0.5294619982920581,0.2,1.0
-0.11764705882352941,0.7336683417085427,0.6229508196721312,0.35353535353535354,0.2293144208037825,0.5692995529061103,0.10717335610589239,0.13333333333333333,0.0
-0.5294117647058824,0.9246231155778895,0.6967213114754098,0.15151515151515152,0.0,0.44709388971684055,0.48462852263023054,0.4666666666666667,1.0
-0.5882352941176471,0.6130653266331658,0.5573770491803278,0.0,0.0,0.46497764530551416,0.07685738684884713,0.3333333333333333,0.0
-0.0,0.8291457286432161,0.7377049180327869,0.3333333333333333,0.8037825059101655,0.7794336810730254,0.14901793339026473,0.03333333333333333,0.0
-0.5294117647058824,0.6231155778894473,0.5737704918032787,0.3333333333333333,0.475177304964539,0.5275707898658718,0.08710503842869341,0.21666666666666667,0.0
-0.058823529411764705,0.5577889447236181,0.7049180327868853,0.1919191919191919,0.0,0.4485842026825634,0.02775405636208368,0.03333333333333333,0.0
-0.5294117647058824,0.5326633165829145,0.4262295081967213,0.0,0.0,0.46497764530551416,0.12894961571306574,0.35,0.0
-0.11764705882352941,0.6482412060301508,0.6885245901639344,0.0,0.0,0.41728763040238454,0.08795900939368059,0.1,0.0
-0.11764705882352941,0.45226130653266333,0.6557377049180327,0.1414141414141414,0.06501182033096926,0.36363636363636365,0.07301451750640478,0.05,0.0
-0.0,0.4321608040201005,0.5573770491803278,0.32323232323232326,0.0,0.533532041728763,0.06831767719897522,0.06666666666666667,0.0
-0.7058823529411765,0.4623115577889447,0.5081967213114754,0.0707070707070707,0.3049645390070922,0.41132637853949333,0.3620836891545688,0.38333333333333336,1.0
-0.058823529411764705,0.5678391959798995,0.5245901639344263,0.35353535353535354,0.0,0.5007451564828614,0.19854824935952178,0.0,1.0
-0.17647058823529413,0.5577889447236181,0.45901639344262296,0.3939393939393939,0.0,0.4485842026825634,0.20452604611443212,0.15,0.0
-0.11764705882352941,0.5728643216080402,0.5573770491803278,0.2222222222222222,0.0,0.4277198211624441,0.005977796754910332,0.06666666666666667,0.0
-0.058823529411764705,0.9698492462311558,0.4098360655737705,0.16161616161616163,0.4432624113475177,0.3859910581222057,0.24637062339880447,0.05,0.0
-0.6470588235294118,0.7788944723618091,0.6229508196721312,0.2828282828282828,0.1773049645390071,0.496274217585693,0.5444064901793338,0.5,1.0
-0.17647058823529413,0.9597989949748744,0.5573770491803278,0.15151515151515152,0.1536643026004728,0.4605067064083458,0.09436379163108453,0.21666666666666667,0.0
-0.17647058823529413,0.7085427135678392,0.0,0.0,0.0,0.44709388971684055,0.29163108454312553,0.1,1.0
-0.23529411764705882,0.47738693467336685,0.5737704918032787,0.32323232323232326,0.0,0.47839046199701946,0.22801024765157984,0.05,0.0
-0.17647058823529413,0.7135678391959799,0.6557377049180327,0.15151515151515152,0.0,0.4828614008941878,0.05209222886421862,0.7,0.0
-0.23529411764705882,0.6180904522613065,0.5081967213114754,0.0,0.0,0.4769001490312966,0.0631938514090521,0.23333333333333334,1.0
-0.29411764705882354,0.4824120603015075,0.6065573770491803,0.18181818181818182,0.07919621749408984,0.5007451564828614,0.392399658411614,0.36666666666666664,0.0
-0.0,0.6934673366834171,0.0,0.0,0.0,0.5409836065573771,0.36507258753202393,0.06666666666666667,1.0
-0.11764705882352941,0.6432160804020101,0.5245901639344263,0.42424242424242425,0.0,0.5961251862891208,0.43680614859094785,0.05,0.0
-0.0,0.5125628140703518,0.4262295081967213,0.0,0.0,0.3740685543964233,0.0,0.0,0.0
-0.11764705882352941,0.7336683417085427,0.0,0.0,0.0,0.4098360655737705,0.06917164816396242,0.11666666666666667,1.0
-0.5882352941176471,0.507537688442211,0.7049180327868853,0.37373737373737376,0.0,0.6795827123695977,0.45175064047822366,0.2833333333333333,1.0
-0.11764705882352941,0.542713567839196,0.5081967213114754,0.32323232323232326,0.06619385342789598,0.37555886736214605,0.02134927412467976,0.0,0.0
-0.17647058823529413,0.6130653266331658,0.639344262295082,0.0,0.0,0.34277198211624443,0.07514944491887275,0.31666666666666665,0.0
-0.058823529411764705,0.35678391959798994,0.639344262295082,0.5050505050505051,0.05319148936170213,0.4947839046199703,0.14688300597779674,0.0,0.0
-0.7647058823529411,0.5326633165829145,0.5737704918032787,0.0,0.0,0.5096870342771983,0.07386848847139196,0.5166666666666667,0.0
-0.11764705882352941,0.5025125628140703,0.5737704918032787,0.5252525252525253,0.0673758865248227,0.6035767511177348,0.25576430401366357,0.06666666666666667,0.0
-0.4117647058823529,0.5326633165829145,0.4918032786885246,0.24242424242424243,0.0,0.3949329359165425,0.09308283518360375,0.13333333333333333,1.0
-0.0,0.5226130653266332,0.5245901639344263,0.23232323232323232,0.13711583924349882,0.41430700447093893,0.1605465414175918,0.03333333333333333,0.0
-0.29411764705882354,0.5728643216080402,0.6065573770491803,0.0,0.0,0.3710879284649777,0.2843723313407344,0.6,0.0
-0.11764705882352941,0.542713567839196,0.5081967213114754,0.10101010101010101,0.32860520094562645,0.3770491803278689,0.34286934244235695,0.016666666666666666,0.0
-0.0,0.7336683417085427,0.5737704918032787,0.0,0.0,0.5648286140089419,0.10930828351836037,0.11666666666666667,1.0
-0.5882352941176471,0.6482412060301508,0.6229508196721312,0.2828282828282828,0.14420803782505912,0.5350223546944859,0.08625106746370624,0.3,0.0
-0.4117647058823529,0.6683417085427136,0.7213114754098361,0.15151515151515152,0.18321513002364065,0.4828614008941878,0.07856532877882151,0.26666666666666666,0.0
-0.4117647058823529,0.8090452261306532,0.7049180327868853,0.0,0.0,0.45305514157973176,0.037147736976942784,0.43333333333333335,1.0
-0.11764705882352941,0.542713567839196,0.6557377049180327,0.0,0.0,0.4023845007451565,0.07728437233134072,0.5166666666666667,1.0
-0.4117647058823529,0.6834170854271356,0.6065573770491803,0.26262626262626265,0.1595744680851064,0.3874813710879285,0.24295473953885569,0.5,0.0
-0.29411764705882354,0.7788944723618091,0.6885245901639344,0.4444444444444444,0.6442080378250591,0.5767511177347244,0.230999146029035,0.21666666666666667,0.0
-0.058823529411764705,0.5979899497487438,0.7049180327868853,0.3939393939393939,0.26004728132387706,0.6795827123695977,0.31169940222032455,0.13333333333333333,1.0
-0.23529411764705882,0.4824120603015075,0.45901639344262296,0.1717171717171717,0.057919621749408984,0.3099850968703428,0.11187019641332195,0.08333333333333333,0.0
-0.29411764705882354,0.542713567839196,0.5901639344262295,0.43434343434343436,0.08865248226950355,0.5380029806259315,0.07899231426131512,0.2,0.0
-0.0,0.39195979899497485,0.7213114754098361,0.29292929292929293,0.04728132387706856,0.5499254843517138,0.1520068317677199,0.0,0.0
-0.0,0.5376884422110553,0.5081967213114754,0.30303030303030304,0.08747044917257683,0.5454545454545455,0.28992314261315116,0.06666666666666667,1.0
-0.11764705882352941,0.6432160804020101,0.639344262295082,0.37373737373737376,0.21513002364066194,0.6453055141579732,0.48932536293766005,0.16666666666666666,1.0
-0.058823529411764705,0.6432160804020101,0.39344262295081966,0.45454545454545453,0.2293144208037825,0.6035767511177348,0.22843723313407344,0.05,1.0
-0.0,0.8090452261306532,0.4098360655737705,0.0,0.0,0.3263785394932936,0.07514944491887275,0.7333333333333333,0.0
-0.35294117647058826,0.7587939698492462,0.5081967213114754,0.31313131313131315,0.14184397163120568,0.5290611028315947,0.26216908625106744,0.11666666666666667,0.0
-0.11764705882352941,0.7336683417085427,0.5737704918032787,0.3838383838383838,0.425531914893617,0.41728763040238454,0.11058923996584116,0.13333333333333333,1.0
-0.0,0.6331658291457286,0.6885245901639344,0.29292929292929293,0.2541371158392435,0.4575260804769002,0.18872758326216907,0.05,0.0
-0.8235294117647058,0.5025125628140703,0.639344262295082,0.25252525252525254,0.21749408983451538,0.5454545454545455,0.14261315115286077,0.4166666666666667,1.0
-0.47058823529411764,0.5628140703517588,0.5901639344262295,0.0,0.0,0.3517138599105813,0.32536293766011953,0.6166666666666667,0.0
-0.0,0.8391959798994975,0.0,0.0,0.0,0.481371087928465,0.32493595217762594,0.15,1.0
-0.11764705882352941,0.7236180904522613,0.47540983606557374,0.3333333333333333,0.1595744680851064,0.4709388971684054,0.14688300597779674,0.06666666666666667,1.0
-0.29411764705882354,0.3869346733668342,0.6721311475409836,0.41414141414141414,0.04964539007092199,0.533532041728763,0.033304867634500426,0.23333333333333334,0.0
-0.29411764705882354,0.5778894472361809,0.8032786885245902,0.0,0.0,0.7883755588673622,0.055935098206660976,0.11666666666666667,1.0
-0.17647058823529413,0.7537688442211056,0.6229508196721312,0.0,0.0,0.3129657228017884,0.055081127241673786,0.26666666666666666,0.0
-0.11764705882352941,0.6030150753768844,0.6229508196721312,0.37373737373737376,0.12411347517730496,0.5916542473919524,0.058497011101622545,0.13333333333333333,0.0
-0.5882352941176471,0.8090452261306532,0.5573770491803278,0.23232323232323232,0.15602836879432624,0.3800298062593145,0.10589239965841162,0.43333333333333335,1.0
-0.0,0.6884422110552764,0.5573770491803278,0.1414141414141414,0.17494089834515367,0.3695976154992549,0.02775405636208368,0.0,0.0
-0.0,0.6432160804020101,0.5573770491803278,0.1919191919191919,0.2127659574468085,0.4545454545454546,0.5606319385140904,0.06666666666666667,1.0
-0.11764705882352941,0.6231155778894473,0.5573770491803278,0.2828282828282828,0.24231678486997635,0.4903129657228018,0.3403074295473954,0.15,1.0
-0.35294117647058826,0.4020100502512563,0.5409836065573771,0.30303030303030304,0.0,0.3904619970193741,0.10034158838599487,0.3333333333333333,0.0
-0.0,0.5326633165829145,0.5737704918032787,0.37373737373737376,0.17494089834515367,0.587183308494784,0.22502134927412468,0.016666666666666666,0.0
-0.11764705882352941,0.7788944723618091,0.6065573770491803,0.1717171717171717,0.11347517730496454,0.3964232488822653,0.15157984628522628,0.1,1.0
-0.17647058823529413,0.5678391959798995,0.4098360655737705,0.10101010101010101,0.10047281323877069,0.4396423248882266,0.23398804440649018,0.06666666666666667,0.0
-0.4117647058823529,0.5477386934673367,0.6557377049180327,0.31313131313131315,0.0,0.5350223546944859,0.44790777113578134,0.36666666666666664,1.0
-0.11764705882352941,0.5628140703517588,0.5573770491803278,0.2222222222222222,0.1111111111111111,0.5081967213114754,0.10119555935098205,0.08333333333333333,0.0
-0.17647058823529413,0.49748743718592964,0.6557377049180327,0.1111111111111111,0.07565011820330969,0.28763040238450077,0.08795900939368059,0.15,0.0
-0.17647058823529413,0.914572864321608,0.6065573770491803,0.0,0.0,0.4545454545454546,0.11400512382578991,0.13333333333333333,1.0
-0.17647058823529413,0.5778894472361809,0.5409836065573771,0.3939393939393939,0.16548463356973994,0.5678092399403876,0.030742954739538853,0.11666666666666667,0.0
-0.35294117647058826,0.9748743718592965,0.639344262295082,0.0,0.0,0.35022354694485847,0.021776259607173356,0.6333333333333333,1.0
-0.23529411764705882,0.6482412060301508,0.4918032786885246,0.12121212121212122,0.2730496453900709,0.4098360655737705,0.19171648163962424,0.16666666666666666,0.0
-0.17647058823529413,0.5628140703517588,0.6065573770491803,0.30303030303030304,0.0,0.4709388971684054,0.05081127241673783,0.06666666666666667,1.0
-0.0,0.6231155778894473,0.5737704918032787,0.20202020202020202,0.0,0.40834575260804773,0.07514944491887275,0.25,1.0
-0.7647058823529411,0.7638190954773869,0.7377049180327869,0.3333333333333333,0.034278959810874705,0.3994038748137109,0.2788215200683177,0.36666666666666664,1.0
-0.11764705882352941,0.5628140703517588,0.6147540983606558,0.32323232323232326,0.0,0.5320417287630403,0.02988898377455166,0.0,0.0
-0.058823529411764705,0.7889447236180904,0.5901639344262295,0.21212121212121213,0.19858156028368795,0.3815201192250373,0.019214346712211783,0.05,0.0
-0.058823529411764705,0.6130653266331658,0.5245901639344263,0.32323232323232326,0.18439716312056736,0.5230998509687035,0.26216908625106744,0.15,1.0
-0.5882352941176471,0.8994974874371859,0.5737704918032787,0.0,0.0,0.5230998509687035,0.05209222886421862,0.26666666666666666,0.0
-0.11764705882352941,0.5125628140703518,0.7049180327868853,0.36363636363636365,0.14184397163120568,0.6780923994038749,0.020922288642186166,0.03333333333333333,1.0
-0.35294117647058826,0.5276381909547738,0.5737704918032787,0.32323232323232326,0.08037825059101655,0.459016393442623,0.018787361229718188,0.26666666666666666,0.0
-0.47058823529411764,0.592964824120603,0.5901639344262295,0.1919191919191919,0.0,0.34426229508196726,0.5969257045260461,0.4166666666666667,0.0
-0.11764705882352941,0.4371859296482412,0.47540983606557374,0.16161616161616163,0.061465721040189124,0.48733233979135626,0.03757472245943638,0.06666666666666667,0.0
-0.058823529411764705,0.9045226130653267,0.0,0.0,0.0,0.6453055141579732,0.08710503842869341,0.3333333333333333,1.0
-0.7058823529411765,0.5326633165829145,0.6557377049180327,0.0,0.0,0.3517138599105813,0.025192143467122122,0.38333333333333336,0.0
-0.058823529411764705,0.47738693467336685,0.4918032786885246,0.18181818181818182,0.06855791962174941,0.3561847988077496,0.07771135781383433,0.016666666666666666,0.0
-0.0,0.8291457286432161,0.6229508196721312,0.43434343434343436,0.30141843971631205,0.7138599105812221,0.07728437233134072,0.08333333333333333,0.0
-0.0,0.5879396984924623,0.0,0.0,0.0,0.503725782414307,0.36464560204953034,0.38333333333333336,0.0
-0.29411764705882354,0.5778894472361809,0.6229508196721312,0.0,0.0,0.46497764530551416,0.11315115286080274,0.38333333333333336,1.0
-0.5294117647058824,0.7638190954773869,0.639344262295082,0.3434343434343434,0.20212765957446807,0.5096870342771983,0.3479931682322801,0.2,1.0
-0.4117647058823529,0.8944723618090452,0.6885245901639344,0.0,0.0,0.5946348733233979,0.10802732707087959,0.3333333333333333,1.0
-0.058823529411764705,0.6532663316582915,0.5737704918032787,0.13131313131313133,0.12411347517730496,0.3859910581222057,0.16823228010247648,0.016666666666666666,0.0
-0.058823529411764705,0.47738693467336685,0.6065573770491803,0.21212121212121213,0.08628841607565012,0.3859910581222057,0.2540563620836892,0.25,0.0
-0.058823529411764705,0.0,0.5573770491803278,0.35353535353535354,0.0,0.4769001490312966,0.1327924850555081,0.016666666666666666,0.0
-0.29411764705882354,0.6130653266331658,0.7049180327868853,0.0,0.0,0.5171385991058123,0.09052092228864217,0.2,0.0
-0.47058823529411764,0.47738693467336685,0.5901639344262295,0.0,0.0,0.5484351713859911,0.17378309137489323,0.6,0.0
-0.47058823529411764,0.6331658291457286,0.7213114754098361,0.36363636363636365,0.1276595744680851,0.5737704918032788,0.11571306575576429,0.4666666666666667,0.0
-0.058823529411764705,0.6984924623115578,0.3770491803278688,0.1919191919191919,0.09810874704491726,0.4277198211624441,0.24594363791631085,0.016666666666666666,0.0
-0.17647058823529413,0.5829145728643216,0.0,0.0,0.0,0.35022354694485847,0.04654141759180188,0.03333333333333333,0.0
-0.17647058823529413,0.49748743718592964,0.5081967213114754,0.1919191919191919,0.08747044917257683,0.3248882265275708,0.08582408198121264,0.08333333333333333,0.0
-0.29411764705882354,0.0,0.6557377049180327,0.32323232323232326,0.0,0.6110283159463488,0.1144321093082835,0.26666666666666666,1.0
-0.23529411764705882,0.4623115577889447,0.6557377049180327,0.0,0.0,0.6289120715350225,0.06789069171648163,0.13333333333333333,0.0
-0.23529411764705882,0.6884422110552764,0.6885245901639344,0.0,0.0,0.46497764530551416,0.07429547395388555,0.15,0.0
-0.17647058823529413,0.3065326633165829,0.6721311475409836,0.2828282828282828,0.0,0.5126676602086438,0.0704526046114432,0.4166666666666667,0.0
-0.058823529411764705,0.45226130653266333,0.5081967213114754,0.12121212121212122,0.0508274231678487,0.4053651266766021,0.21434671221178478,0.05,0.0
-0.17647058823529413,0.45226130653266333,0.639344262295082,0.0,0.0,0.6363636363636365,0.2053800170794193,0.0,0.0
-0.5294117647058824,0.8291457286432161,0.7213114754098361,0.0,0.0,0.45305514157973176,0.09564474807856531,0.4666666666666667,1.0
-0.058823529411764705,0.628140703517588,0.4098360655737705,0.40404040404040403,0.19739952718676124,0.496274217585693,0.37745516652433814,0.11666666666666667,1.0
-0.7647058823529411,0.6482412060301508,0.0,0.30303030303030304,0.0,0.5946348733233979,0.20964987190435522,0.38333333333333336,1.0
-0.7058823529411765,0.44221105527638194,0.6065573770491803,0.40404040404040403,0.06382978723404255,0.526080476900149,0.12809564474807855,0.45,0.0
-0.058823529411764705,0.9849246231155779,0.6229508196721312,0.36363636363636365,0.29432624113475175,0.5439642324888228,0.3403074295473954,0.13333333333333333,1.0
-0.29411764705882354,0.949748743718593,0.5245901639344263,0.3333333333333333,0.38416075650118203,0.46497764530551416,0.21562766865926558,0.13333333333333333,1.0
-0.29411764705882354,0.7939698492462312,0.5737704918032787,0.0,0.0,0.444113263785395,0.055081127241673786,0.7,0.0
-0.29411764705882354,0.5175879396984925,0.8852459016393442,0.37373737373737376,0.0,0.5842026825633384,0.0969257045260461,0.7333333333333333,0.0
-0.23529411764705882,0.7336683417085427,0.639344262295082,0.0,0.0,0.5737704918032788,0.18872758326216907,0.7666666666666667,1.0
-0.23529411764705882,0.7386934673366834,0.6065573770491803,0.25252525252525254,0.3463356973995272,0.5201192250372578,0.13108454312553372,0.15,0.0
-0.29411764705882354,0.49748743718592964,0.4426229508196721,0.2828282828282828,0.09810874704491726,0.5067064083457526,0.17976088812980356,0.15,0.0
-0.35294117647058826,0.6231155778894473,0.5901639344262295,0.0,0.0,0.41132637853949333,0.1238257899231426,0.13333333333333333,1.0
-0.0,0.507537688442211,0.5245901639344263,0.1717171717171717,0.0,0.3129657228017884,0.07429547395388555,0.0,0.0
-0.17647058823529413,0.40703517587939697,0.7049180327868853,0.16161616161616163,0.07801418439716312,0.4098360655737705,0.0973526900085397,0.016666666666666666,0.0
-0.058823529411764705,0.6683417085427136,0.8360655737704918,0.2828282828282828,0.16548463356973994,0.488822652757079,0.06660973526900087,0.4,1.0
-0.17647058823529413,0.8693467336683417,0.6721311475409836,0.48484848484848486,0.549645390070922,0.5722801788375559,0.8791631084543126,0.06666666666666667,1.0
-0.0,0.592964824120603,0.5245901639344263,0.23232323232323232,0.10520094562647754,0.0,0.7058070025619129,0.0,0.0
-0.0,0.4221105527638191,0.5245901639344263,0.2222222222222222,0.07801418439716312,0.533532041728763,0.19940222032450897,0.0,0.0
-0.11764705882352941,0.5276381909547738,0.47540983606557374,0.40404040404040403,0.1111111111111111,0.5201192250372578,0.0627668659265585,0.06666666666666667,0.0
-0.11764705882352941,0.6130653266331658,0.4262295081967213,0.43434343434343436,0.1867612293144208,0.5394932935916543,0.3151152860802733,0.11666666666666667,0.0
-0.7058823529411765,0.7035175879396985,0.6721311475409836,0.43434343434343436,0.38416075650118203,0.5842026825633384,0.19214346712211786,0.6166666666666667,1.0
-0.0,0.49246231155778897,0.6721311475409836,0.15151515151515152,0.09929078014184398,0.37555886736214605,0.09436379163108453,0.016666666666666666,0.0
-0.058823529411764705,0.4371859296482412,0.4918032786885246,0.37373737373737376,0.08865248226950355,0.5543964232488824,0.18403074295473953,0.016666666666666666,0.0
-0.23529411764705882,0.7839195979899497,0.6147540983606558,0.0,0.0,0.7198211624441133,0.06831767719897522,0.18333333333333332,1.0
-0.0,0.46733668341708545,0.819672131147541,0.3939393939393939,0.0851063829787234,0.646795827123696,0.40264730999146026,0.23333333333333334,0.0
-0.058823529411764705,0.5376884422110553,0.5901639344262295,0.30303030303030304,0.09692671394799054,0.459016393442623,0.31725021349274124,0.05,0.0
-0.0,0.5276381909547738,0.5573770491803278,0.2222222222222222,0.0,0.2980625931445604,0.06746370623398804,0.016666666666666666,0.0
-0.058823529411764705,0.5477386934673367,0.4918032786885246,0.08080808080808081,0.21513002364066194,0.37853949329359166,0.3710503842869342,0.0,0.0
-0.058823529411764705,0.45226130653266333,0.5081967213114754,0.18181818181818182,0.06973995271867613,0.3740685543964233,0.5081127241673783,0.06666666666666667,0.0
-0.058823529411764705,0.628140703517588,0.5737704918032787,0.24242424242424243,0.13002364066193853,0.3621460506706409,0.06105892399658412,0.06666666666666667,0.0
-0.058823529411764705,0.5979899497487438,0.4426229508196721,0.13131313131313133,0.0591016548463357,0.33233979135618485,0.05422715627668659,0.05,0.0
-0.29411764705882354,0.5829145728643216,0.6065573770491803,0.29292929292929293,0.0,0.481371087928465,0.24850555081127243,0.23333333333333334,1.0
-0.47058823529411764,0.5276381909547738,0.819672131147541,0.36363636363636365,0.0,0.6453055141579732,0.06874466268146882,0.4,1.0
-0.29411764705882354,0.7236180904522613,0.6721311475409836,0.26262626262626265,0.33687943262411346,0.4769001490312966,0.1596925704526046,0.6166666666666667,1.0
-0.17647058823529413,0.5025125628140703,0.5573770491803278,0.23232323232323232,0.09574468085106383,0.4709388971684054,0.3719043552519214,0.11666666666666667,0.0
-0.058823529411764705,0.5025125628140703,0.5409836065573771,0.29292929292929293,0.23167848699763594,0.4769001490312966,0.15627668659265584,0.35,0.0
-0.29411764705882354,0.8341708542713567,0.6229508196721312,0.0,0.0,0.6810730253353205,0.11187019641332195,0.1,1.0
-0.058823529411764705,0.6582914572864321,0.5245901639344263,0.1414141414141414,0.4905437352245863,0.35320417287630407,0.1327924850555081,0.0,0.0
-0.23529411764705882,0.5829145728643216,0.5901639344262295,0.12121212121212122,0.10283687943262411,0.32935916542473925,0.16438941076003416,0.26666666666666666,0.0
-0.23529411764705882,0.7939698492462312,0.639344262295082,0.0,0.0,0.4903129657228018,0.30956447480785654,0.16666666666666666,1.0
-0.11764705882352941,0.6381909547738693,0.47540983606557374,0.24242424242424243,0.32505910165484636,0.4128166915052161,0.6498719043552519,0.06666666666666667,0.0
-0.17647058823529413,0.4824120603015075,0.45901639344262296,0.3434343434343434,0.1359338061465721,0.3681073025335321,0.36976942783945344,0.3,0.0
-0.0,0.6582914572864321,0.5409836065573771,0.40404040404040403,0.0,0.511177347242921,0.05038428693424424,0.016666666666666666,1.0
-0.17647058823529413,0.4120603015075377,0.5737704918032787,0.0,0.0,0.31445603576751124,0.1327924850555081,0.06666666666666667,0.0
-0.17647058823529413,0.9698492462311558,0.5737704918032787,0.31313131313131315,0.0,0.5201192250372578,0.069598633646456,0.06666666666666667,1.0
-0.23529411764705882,0.47738693467336685,0.5245901639344263,0.0,0.0,0.4769001490312966,0.035439795046968404,0.16666666666666666,1.0
-0.35294117647058826,0.6884422110552764,0.5,0.0,0.0,0.36065573770491804,0.031169940222032448,0.5666666666666667,0.0
-0.29411764705882354,0.6834170854271356,0.6885245901639344,0.41414141414141414,0.10401891252955082,0.5216095380029807,0.08881298035866779,0.23333333333333334,1.0
-0.5294117647058824,0.36180904522613067,0.639344262295082,0.25252525252525254,0.0,0.4709388971684054,0.08625106746370624,0.2833333333333333,0.0
-0.29411764705882354,0.8442211055276382,0.5245901639344263,0.0,0.0,0.4903129657228018,0.02433817250213493,0.3333333333333333,1.0
-0.11764705882352941,0.6180904522613065,0.39344262295081966,0.32323232323232326,0.1950354609929078,0.6274217585692996,0.18872758326216907,0.08333333333333333,0.0
-0.23529411764705882,0.5778894472361809,0.5901639344262295,0.0,0.0,0.4307004470938897,0.12724167378309137,0.4166666666666667,1.0
-0.0,0.507537688442211,0.5081967213114754,0.0,0.0,0.3263785394932936,0.11016225448334757,0.06666666666666667,0.0
-0.47058823529411764,0.9899497487437185,0.6065573770491803,0.0,0.0,0.3859910581222057,0.4752348420153715,0.3,1.0
-0.058823529411764705,0.864321608040201,0.5573770491803278,0.494949494949495,0.6843971631205674,0.631892697466468,0.2664389410760034,0.11666666666666667,1.0
-0.35294117647058826,0.5125628140703518,0.7377049180327869,0.3939393939393939,0.0,0.5320417287630403,0.2544833475661828,0.11666666666666667,0.0
-0.058823529411764705,0.5628140703517588,0.5901639344262295,0.30303030303030304,0.20803782505910165,0.5126676602086438,0.19214346712211786,0.06666666666666667,0.0
-0.058823529411764705,0.7185929648241206,0.6885245901639344,0.23232323232323232,0.3664302600472813,0.631892697466468,0.42613151152860806,0.016666666666666666,0.0
-0.058823529411764705,0.7185929648241206,0.6065573770491803,0.2222222222222222,0.07210401891252956,0.3904619970193741,0.07600341588385995,0.0,0.0
-0.0,0.6934673366834171,0.4918032786885246,0.35353535353535354,0.19739952718676124,0.5156482861400895,0.19470538001707943,0.0,1.0
-0.17647058823529413,0.8693467336683417,0.6885245901639344,0.3333333333333333,0.5602836879432624,0.5320417287630403,0.07685738684884713,0.016666666666666666,1.0
-0.058823529411764705,0.48743718592964824,0.5573770491803278,0.21212121212121213,0.0,0.4053651266766021,0.4342442356959863,0.016666666666666666,0.0
-0.23529411764705882,0.7236180904522613,0.6721311475409836,0.32323232323232326,0.0,0.5737704918032788,0.20324508966695132,0.26666666666666666,1.0
-0.058823529411764705,0.41708542713567837,0.5573770491803278,0.0,0.0,0.27123695976154993,0.233134073441503,0.1,0.0
-0.17647058823529413,0.6482412060301508,0.5245901639344263,0.29292929292929293,0.1359338061465721,0.39344262295081966,0.06020495303159693,0.11666666666666667,1.0
-0.058823529411764705,0.5979899497487438,0.7213114754098361,0.41414141414141414,0.20094562647754138,0.6751117734724292,0.18317677198975235,0.08333333333333333,0.0
-0.11764705882352941,0.4723618090452261,0.5573770491803278,0.18181818181818182,0.08983451536643026,0.3874813710879285,0.2062339880444065,0.0,0.0
-0.0,0.5125628140703518,0.5245901639344263,0.46464646464646464,0.09219858156028368,0.6050670640834576,0.1784799316823228,0.0,0.0
-0.11764705882352941,0.5778894472361809,0.5245901639344263,0.2222222222222222,0.0,0.459016393442623,0.14645602049530315,0.0,0.0
-0.47058823529411764,0.7587939698492462,0.639344262295082,0.32323232323232326,0.24822695035460993,0.639344262295082,0.1870196413321947,0.25,1.0
-0.23529411764705882,0.9246231155778895,0.639344262295082,0.3939393939393939,0.32742316784869974,0.5514157973174367,0.07941929974380871,0.16666666666666666,1.0
-0.0,0.4723618090452261,0.0,0.0,0.0,0.0,0.07600341588385995,0.06666666666666667,0.0
-0.058823529411764705,0.9095477386934674,0.5245901639344263,0.30303030303030304,0.2127659574468085,0.5081967213114754,0.1067463706233988,0.2833333333333333,1.0
-0.0,0.678391959798995,0.7704918032786885,0.46464646464646464,0.17139479905437352,0.6050670640834576,0.08795900939368059,0.08333333333333333,0.0
-0.058823529411764705,0.47738693467336685,0.6721311475409836,0.25252525252525254,0.2127659574468085,0.5216095380029807,0.06618274978650726,0.36666666666666664,1.0
-0.11764705882352941,0.49748743718592964,0.0,0.0,0.0,0.330849478390462,0.012809564474807855,0.03333333333333333,0.0
-0.17647058823529413,0.4472361809045226,0.6065573770491803,0.16161616161616163,0.10047281323877069,0.45305514157973176,0.20196413321947054,0.2833333333333333,0.0
-0.058823529411764705,0.4020100502512563,0.6065573770491803,0.1111111111111111,0.07092198581560284,0.44709388971684055,0.19171648163962424,0.016666666666666666,0.0
-0.11764705882352941,0.6984924623115578,0.6147540983606558,0.0,0.0,0.3815201192250373,0.038001707941929974,0.13333333333333333,0.0
-0.058823529411764705,0.45226130653266333,0.5573770491803278,0.08080808080808081,0.0,0.3651266766020865,0.45260461144321085,0.25,0.0
-0.0,0.7085427135678392,0.0,0.0,0.0,0.631892697466468,0.05422715627668659,0.13333333333333333,1.0
-0.7058823529411765,0.7035175879396985,0.6967213114754098,0.3333333333333333,0.0,0.5573770491803279,0.0708795900939368,0.3333333333333333,0.0
-0.29411764705882354,0.7386934673366834,0.6147540983606558,0.0,0.0,0.4456035767511177,0.1520068317677199,0.11666666666666667,0.0
-0.058823529411764705,0.48743718592964824,0.5737704918032787,0.15151515151515152,0.0,0.27123695976154993,0.029461998292058065,0.0,0.0
-0.35294117647058826,0.5376884422110553,0.7213114754098361,0.0,0.0,0.5484351713859911,0.2771135781383433,0.16666666666666666,0.0
-0.0,0.949748743718593,0.8524590163934426,0.25252525252525254,0.0,0.511177347242921,0.1524338172502135,0.3333333333333333,1.0
-0.11764705882352941,0.41708542713567837,0.5409836065573771,0.23232323232323232,0.0591016548463357,0.4798807749627423,0.17890691716481638,0.016666666666666666,0.0
-0.23529411764705882,0.5879396984924623,0.5245901639344263,0.2727272727272727,0.14184397163120568,0.4947839046199703,0.06490179333902649,0.05,0.0
-0.47058823529411764,0.542713567839196,0.5737704918032787,0.0,0.0,0.4545454545454546,0.374466268146883,0.2,1.0
-0.23529411764705882,0.5879396984924623,0.5081967213114754,0.12121212121212122,0.0,0.4426229508196722,0.12894961571306574,0.15,1.0
-0.0,0.9045226130653267,0.639344262295082,0.6363636363636364,0.016548463356973995,0.8852459016393444,1.0,0.06666666666666667,1.0
-0.058823529411764705,0.5025125628140703,0.5901639344262295,0.12121212121212122,0.08274231678486997,0.3770491803278689,0.24765157984628525,0.11666666666666667,0.0
-0.0,0.47738693467336685,0.6557377049180327,0.45454545454545453,0.10874704491725769,0.5439642324888228,0.107600341588386,0.08333333333333333,0.0
-0.0,0.5226130653266332,0.5245901639344263,0.37373737373737376,0.07565011820330969,0.5007451564828614,0.18445772843723313,0.016666666666666666,1.0
-0.0,0.6030150753768844,0.6065573770491803,0.18181818181818182,0.07446808510638298,0.4545454545454546,0.08838599487617418,0.08333333333333333,0.0
-0.058823529411764705,0.4120603015075377,0.5245901639344263,0.13131313131313133,0.11229314420803782,0.315946348733234,0.14389410760034158,0.03333333333333333,0.0
-0.11764705882352941,0.6733668341708543,0.5737704918032787,0.0,0.0,0.4307004470938897,0.1981212638770282,0.03333333333333333,1.0
-0.0,0.457286432160804,0.5573770491803278,0.32323232323232326,0.24822695035460993,0.5946348733233979,0.12937660119555935,0.06666666666666667,0.0
-0.11764705882352941,0.5979899497487438,0.0,0.0,0.0,0.2921013412816692,0.3219470538001708,0.85,0.0
-0.11764705882352941,0.5025125628140703,0.4426229508196721,0.2828282828282828,0.12411347517730496,0.5633383010432191,0.17933390264730997,0.05,0.0
-0.8235294117647058,0.8793969849246231,0.5081967213114754,0.30303030303030304,0.0,0.5007451564828614,0.057216054654141764,0.2833333333333333,1.0
-0.058823529411764705,0.678391959798995,0.4426229508196721,0.0,0.0,0.3979135618479881,0.26003415883859954,0.6833333333333333,0.0
-0.29411764705882354,0.4321608040201005,0.5573770491803278,0.2828282828282828,0.08392434988179669,0.45007451564828616,0.12211784799316822,0.05,0.0
-0.5882352941176471,0.7437185929648241,0.6885245901639344,0.48484848484848486,0.2801418439716312,0.5603576751117736,0.3941076003415883,0.5,1.0
-0.5294117647058824,0.6733668341708543,0.6065573770491803,0.3333333333333333,0.07092198581560284,0.3859910581222057,0.16310845431255336,1.0,0.0
-0.5294117647058824,0.6030150753768844,0.5901639344262295,0.2222222222222222,0.06619385342789598,0.3099850968703428,0.27967549103330486,0.45,0.0
-0.058823529411764705,0.35678391959798994,0.5081967213114754,0.0,0.0,0.3248882265275708,0.14432109308283517,0.08333333333333333,0.0
-0.47058823529411764,0.37185929648241206,0.5737704918032787,0.40404040404040403,0.057919621749408984,0.526080476900149,0.2677198975234842,0.3,0.0
-0.29411764705882354,0.44221105527638194,0.639344262295082,0.30303030303030304,0.0,0.41132637853949333,0.07685738684884713,0.26666666666666666,0.0
-0.5882352941176471,0.5778894472361809,0.8032786885245902,0.0,0.0,0.35767511177347244,0.4030742954739539,0.21666666666666667,0.0
-0.0,0.6231155778894473,0.45901639344262296,0.13131313131313133,0.12411347517730496,0.3248882265275708,0.1596925704526046,0.0,0.0
-0.0,0.37185929648241206,0.4262295081967213,0.10101010101010101,0.0425531914893617,0.41430700447093893,0.08155422715627668,0.016666666666666666,0.0
-0.0,0.48743718592964824,0.5245901639344263,0.36363636363636365,0.1182033096926714,0.5484351713859911,0.2228864218616567,0.06666666666666667,0.0
-0.47058823529411764,0.6030150753768844,0.0,0.0,0.0,0.44709388971684055,0.04483347566182749,0.2833333333333333,1.0
-0.35294117647058826,0.7738693467336684,0.639344262295082,0.41414141414141414,0.16548463356973994,0.6870342771982118,0.2105038428693424,0.1,0.0
-0.058823529411764705,0.7236180904522613,0.6721311475409836,0.40404040404040403,0.0,0.6154992548435172,0.22587532023911186,0.11666666666666667,0.0
-0.0,0.6884422110552764,0.5737704918032787,0.3838383838383838,0.0,0.4947839046199703,0.03928266438941076,0.016666666666666666,0.0
-0.0,0.5979899497487438,0.5409836065573771,0.2727272727272727,0.0,0.5782414307004471,0.07728437233134072,0.016666666666666666,0.0
-0.4117647058823529,0.6834170854271356,0.7377049180327869,0.0,0.0,0.4456035767511177,0.05636208368915457,0.48333333333333334,0.0
-0.23529411764705882,0.5728643216080402,0.5245901639344263,0.0,0.0,0.4307004470938897,0.02049530315969257,0.05,0.0
-0.0,0.6884422110552764,0.6885245901639344,0.2727272727272727,0.0,0.40685543964232496,0.06532877882152008,0.6333333333333333,0.0
-0.11764705882352941,0.5276381909547738,0.6557377049180327,0.45454545454545453,0.22576832151300236,0.5022354694485843,0.2702818104184458,0.13333333333333333,1.0
-0.4117647058823529,0.5728643216080402,0.6229508196721312,0.1717171717171717,0.13002364066193853,0.35469448584202684,0.16567036720751493,0.16666666666666666,0.0
-0.47058823529411764,0.6331658291457286,0.6065573770491803,0.3838383838383838,0.08865248226950355,0.3859910581222057,0.035866780529461996,0.3,0.0
-0.23529411764705882,0.6633165829145728,0.7049180327868853,0.31313131313131315,0.0,0.41728763040238454,0.14560204953031594,0.7,0.0
-0.17647058823529413,0.7939698492462312,0.5737704918032787,0.30303030303030304,0.3877068557919622,0.5290611028315947,0.1135781383432963,0.23333333333333334,1.0
-0.0,0.6180904522613065,0.7213114754098361,0.37373737373737376,0.0,0.5245901639344264,0.05081127241673783,0.13333333333333333,0.0
-0.23529411764705882,0.4271356783919598,0.47540983606557374,0.2222222222222222,0.057919621749408984,0.41430700447093893,0.0973526900085397,0.11666666666666667,0.0
-0.0,0.4221105527638191,0.6721311475409836,0.31313131313131315,0.14775413711583923,0.5692995529061103,0.06618274978650726,0.03333333333333333,0.0
-0.0,0.7286432160804021,0.0,0.0,0.0,0.6587183308494785,0.23569598633646457,0.16666666666666666,1.0
-0.0,0.678391959798995,0.5573770491803278,0.42424242424242425,0.29550827423167847,0.6304023845007451,0.12254483347566181,0.05,1.0
-0.058823529411764705,0.6984924623115578,0.5081967213114754,0.41414141414141414,0.5673758865248227,0.6065573770491804,0.19555935098206662,0.0,0.0
-0.0,0.8693467336683417,0.639344262295082,0.32323232323232326,0.3132387706855792,0.6929955290611028,0.4615713065755764,0.6166666666666667,0.0
-0.23529411764705882,0.49748743718592964,0.5901639344262295,0.1717171717171717,0.0,0.3815201192250373,0.09222886421861655,0.11666666666666667,0.0
-0.47058823529411764,0.9748743718592965,0.6557377049180327,0.0,0.0,0.38897168405365135,0.20196413321947054,0.7666666666666667,0.0
-0.11764705882352941,0.41708542713567837,0.5327868852459017,0.2828282828282828,0.07801418439716312,0.5484351713859911,0.23526900085397098,0.05,0.0
-0.11764705882352941,0.4472361809045226,0.7377049180327869,0.30303030303030304,0.0,0.49925484351713867,0.09137489325362937,0.35,0.0
-0.23529411764705882,0.49748743718592964,0.5573770491803278,0.3838383838383838,0.0,0.488822652757079,0.028608027327070875,0.2,0.0
-0.23529411764705882,0.628140703517588,0.5737704918032787,0.18181818181818182,0.14420803782505912,0.4307004470938897,0.4551665243381724,0.4,1.0
-0.17647058823529413,0.4020100502512563,0.0,0.0,0.0,0.0,0.040990606319385135,0.016666666666666666,0.0
-0.35294117647058826,0.8341708542713567,0.6065573770491803,0.0,0.0,0.3964232488822653,0.0964987190435525,0.75,0.0
-0.29411764705882354,0.5527638190954773,0.5573770491803278,0.0,0.0,0.3874813710879285,0.09137489325362937,0.15,0.0
-0.11764705882352941,0.40703517587939697,0.5901639344262295,0.15151515151515152,0.08983451536643026,0.4485842026825634,0.20025619128949615,0.06666666666666667,0.0
-0.4117647058823529,0.9798994974874372,0.5737704918032787,0.3333333333333333,0.17139479905437352,0.3740685543964233,0.036293766011955594,0.5666666666666667,1.0
-0.35294117647058826,0.7738693467336684,0.6065573770491803,0.32323232323232326,0.2281323877068558,0.436661698956781,0.32493595217762594,0.3,0.0
-0.11764705882352941,0.5879396984924623,0.7377049180327869,0.1919191919191919,0.08392434988179669,0.37555886736214605,0.10034158838599487,0.0,0.0
-0.17647058823529413,0.4221105527638191,0.5901639344262295,0.32323232323232326,0.0,0.5543964232488824,0.0807002561912895,0.11666666666666667,0.0
-0.35294117647058826,0.0,0.5573770491803278,0.41414141414141414,0.0,0.5812220566318927,0.2771135781383433,0.3333333333333333,1.0
-0.4117647058823529,0.4723618090452261,0.5245901639344263,0.25252525252525254,0.0933806146572104,0.496274217585693,0.28181041844577287,0.3333333333333333,0.0
-0.17647058823529413,0.4824120603015075,0.639344262295082,0.3939393939393939,0.0,0.555886736214605,0.06831767719897522,0.31666666666666665,0.0
-0.5882352941176471,0.3768844221105528,0.6721311475409836,0.0,0.0,0.496274217585693,0.07899231426131512,0.2833333333333333,0.0
-0.0,0.9045226130653267,0.7377049180327869,0.26262626262626265,0.10638297872340426,0.5439642324888228,0.10076857386848846,0.23333333333333334,1.0
-0.058823529411764705,0.6532663316582915,0.4918032786885246,0.23232323232323232,0.20094562647754138,0.4262295081967214,0.26216908625106744,0.0,0.0
-0.11764705882352941,0.4221105527638191,0.4098360655737705,0.23232323232323232,0.08983451536643026,0.45305514157973176,0.38001707941929974,0.0,0.0
-0.47058823529411764,0.6030150753768844,0.639344262295082,0.0,0.0,0.37257824143070045,0.14133219470538,0.7166666666666667,0.0
-0.7058823529411765,0.4221105527638191,0.5901639344262295,0.31313131313131315,0.0,0.4426229508196722,0.09350982066609734,0.4166666666666667,1.0
-0.0,0.6984924623115578,0.5081967213114754,0.1717171717171717,0.24822695035460993,0.32935916542473925,0.055081127241673786,0.0,0.0
-0.5294117647058824,0.457286432160804,0.5573770491803278,0.0,0.0,0.36065573770491804,0.05209222886421862,0.6166666666666667,0.0
-0.11764705882352941,0.457286432160804,0.5081967213114754,0.0,0.0,0.40685543964232496,0.19086251067463705,0.016666666666666666,0.0
-0.17647058823529413,0.49748743718592964,0.4426229508196721,0.1919191919191919,0.1016548463356974,0.3815201192250373,0.032450896669513236,0.05,0.0
-0.17647058823529413,0.8190954773869347,0.5737704918032787,0.18181818181818182,0.12411347517730496,0.4709388971684054,0.08112724167378309,0.11666666666666667,1.0
-0.5294117647058824,0.7286432160804021,0.7213114754098361,0.3434343434343434,0.1950354609929078,0.451564828614009,0.2959009393680615,0.5333333333333333,1.0
-0.4117647058823529,0.628140703517588,0.7049180327868853,0.0,0.0,0.5603576751117736,0.0964987190435525,0.5,0.0
-0.7647058823529411,0.38190954773869346,0.4918032786885246,0.0,0.0,0.488822652757079,0.043552519214346705,0.3333333333333333,0.0
-0.35294117647058826,0.6482412060301508,0.7377049180327869,0.0707070707070707,0.38534278959810875,0.2921013412816692,0.215200683176772,0.65,0.0
-0.11764705882352941,0.3417085427135678,0.5737704918032787,0.32323232323232326,0.07801418439716312,0.37257824143070045,0.04654141759180188,0.06666666666666667,0.0
-0.17647058823529413,0.6231155778894473,0.6557377049180327,0.3333333333333333,0.1536643026004728,0.4947839046199703,0.0969257045260461,0.08333333333333333,0.0
-0.35294117647058826,0.5728643216080402,0.0,0.0,0.0,0.0,0.04739538855678907,0.08333333333333333,0.0
-0.5294117647058824,0.6532663316582915,0.5737704918032787,0.0,0.0,0.5096870342771983,0.24508966695132367,0.4,1.0
-0.17647058823529413,0.628140703517588,0.47540983606557374,0.0,0.0,0.4709388971684054,0.031169940222032448,0.05,0.0
-0.17647058823529413,0.4371859296482412,0.4918032786885246,0.18181818181818182,0.0,0.3248882265275708,0.15627668659265584,0.0,0.0
-0.058823529411764705,0.48743718592964824,0.5245901639344263,0.1919191919191919,0.09692671394799054,0.27123695976154993,0.09436379163108453,0.0,0.0
-0.17647058823529413,0.5829145728643216,0.6065573770491803,0.15151515151515152,0.12411347517730496,0.3919523099850969,0.01238257899231426,0.05,0.0
-0.0,0.5879396984924623,0.5409836065573771,0.31313131313131315,0.2222222222222222,0.459016393442623,0.177198975234842,0.016666666666666666,0.0
-0.0,0.5577889447236181,0.5327868852459017,0.0,0.0,0.3666169895678093,0.24850555081127243,0.16666666666666666,0.0
-0.11764705882352941,0.6130653266331658,0.4918032786885246,0.18181818181818182,0.12529550827423167,0.444113263785395,0.27284372331340734,0.016666666666666666,0.0
-0.0,0.5376884422110553,0.6229508196721312,0.0,0.0,0.6751117734724292,0.25960717335610595,0.05,0.0
-0.058823529411764705,0.4321608040201005,0.5409836065573771,0.5252525252525253,0.0768321513002364,0.6154992548435172,0.3582408198121264,0.13333333333333333,0.0
-0.35294117647058826,0.457286432160804,0.0,0.0,0.0,0.444113263785395,0.18061485909479078,0.16666666666666666,0.0
-0.058823529411764705,0.3869346733668342,0.45901639344262296,0.30303030303030304,0.06619385342789598,0.496274217585693,0.5008539709649871,0.05,0.0
-0.23529411764705882,0.6633165829145728,0.0,0.0,0.0,0.4903129657228018,0.09564474807856531,0.03333333333333333,1.0
-0.0,0.5276381909547738,0.7377049180327869,0.0,0.0,0.4411326378539494,0.05081127241673783,0.4166666666666667,0.0
-0.0,0.2864321608040201,0.4918032786885246,0.0,0.0,0.323397913561848,0.28052946199829204,0.7666666666666667,0.0
-0.0,0.6381909547738693,0.6557377049180327,0.37373737373737376,0.24822695035460993,0.5409836065573771,0.30999146029035013,0.03333333333333333,0.0
-0.17647058823529413,0.6482412060301508,0.7540983606557377,0.494949494949495,0.18321513002364065,0.5424739195230999,0.38001707941929974,0.18333333333333332,1.0
-0.47058823529411764,0.5025125628140703,0.6065573770491803,0.40404040404040403,0.2541371158392435,0.587183308494784,0.24893253629376602,0.36666666666666664,1.0
-0.17647058823529413,0.6432160804020101,0.5901639344262295,0.25252525252525254,0.22458628841607564,0.4828614008941878,0.20111016225448336,0.1,1.0
-0.5882352941176471,0.45226130653266333,0.6967213114754098,0.32323232323232326,0.0,0.5201192250372578,0.3189581554227156,0.5833333333333334,1.0
-0.23529411764705882,0.4221105527638191,0.7377049180327869,0.23232323232323232,0.06619385342789598,0.5886736214605067,0.034585824081981215,0.06666666666666667,0.0
-0.058823529411764705,0.44221105527638194,0.639344262295082,0.29292929292929293,0.08983451536643026,0.4769001490312966,0.12254483347566181,0.13333333333333333,0.0
-0.47058823529411764,0.9346733668341709,0.7377049180327869,0.35353535353535354,0.26595744680851063,0.5141579731743666,0.14730999146029033,0.26666666666666666,1.0
-0.29411764705882354,0.9396984924623115,0.6229508196721312,0.2727272727272727,0.24468085106382978,0.6497764530551416,0.40819812126387706,0.5333333333333333,1.0
-0.23529411764705882,0.6582914572864321,0.5573770491803278,0.21212121212121213,0.19621749408983452,0.49329359165424747,0.035012809564474806,0.11666666666666667,0.0
-0.058823529411764705,0.8241206030150754,0.6721311475409836,0.43434343434343436,0.07919621749408984,0.488822652757079,0.11229718189581554,0.48333333333333334,0.0
-0.23529411764705882,0.949748743718593,0.9016393442622951,0.31313131313131315,0.0,0.42473919523099857,0.25704526046114434,0.26666666666666666,0.0
-0.058823529411764705,0.5829145728643216,0.5737704918032787,0.2828282828282828,0.0,0.40834575260804773,0.053800170794193,0.0,0.0
-0.17647058823529413,0.4221105527638191,0.5573770491803278,0.30303030303030304,0.12529550827423167,0.4754098360655738,0.21904355251921434,0.06666666666666667,0.0
-0.35294117647058826,0.5728643216080402,0.7213114754098361,0.0,0.0,0.41430700447093893,0.07216054654141758,0.75,0.0
-0.058823529411764705,0.44221105527638194,0.5081967213114754,0.24242424242424243,0.05200945626477541,0.4456035767511177,0.14688300597779674,0.03333333333333333,0.0
-0.058823529411764705,0.4221105527638191,0.5245901639344263,0.23232323232323232,0.1359338061465721,0.5499254843517138,0.1678052946199829,0.11666666666666667,0.0
-0.4117647058823529,0.6231155778894473,0.5737704918032787,0.3333333333333333,0.2541371158392435,0.3800298062593145,0.035439795046968404,0.26666666666666666,0.0
-0.058823529411764705,0.48743718592964824,0.5737704918032787,0.40404040404040403,0.0,0.5678092399403876,0.059777967549103334,0.15,0.0
-0.47058823529411764,0.5527638190954773,0.6229508196721312,0.0,0.0,0.41430700447093893,0.06789069171648163,0.6166666666666667,0.0
-0.6470588235294118,0.5175879396984925,0.5573770491803278,0.40404040404040403,0.0,0.6885245901639345,0.02049530315969257,0.35,0.0
-0.6470588235294118,0.4271356783919598,0.6065573770491803,0.0,0.0,0.4485842026825634,0.09479077711357813,0.23333333333333334,0.0
-0.35294117647058826,0.628140703517588,0.6229508196721312,0.0,0.0,0.503725782414307,0.018360375747224593,0.55,1.0
-0.0,0.9949748743718593,0.5409836065573771,0.32323232323232326,0.32387706855791965,0.6154992548435172,0.18104184457728437,0.11666666666666667,1.0
-0.058823529411764705,0.4371859296482412,0.5573770491803278,0.3434343434343434,0.09101654846335698,0.5603576751117736,0.13791631084543127,0.05,0.0
-0.35294117647058826,0.49748743718592964,0.4918032786885246,0.1919191919191919,0.06382978723404255,0.4008941877794337,0.17890691716481638,0.18333333333333332,0.0
-0.0,0.457286432160804,0.6557377049180327,0.0,0.0,0.4828614008941878,0.2233134073441503,0.1,0.0
-0.11764705882352941,0.47738693467336685,0.4426229508196721,0.1414141414141414,0.10401891252955082,0.38897168405365135,0.2860802732707088,0.016666666666666666,0.0
-0.058823529411764705,0.49748743718592964,0.5901639344262295,0.30303030303030304,0.02127659574468085,0.5752608047690015,0.14261315115286077,0.0,0.0
-0.35294117647058826,0.4623115577889447,0.5081967213114754,0.32323232323232326,0.14893617021276595,0.4769001490312966,0.002988898377455169,0.4166666666666667,0.0
-0.23529411764705882,0.7738693467336684,0.5901639344262295,0.29292929292929293,0.14893617021276595,0.466467958271237,0.11101622544833475,0.26666666666666666,0.0
-0.0,0.6080402010050251,0.5409836065573771,0.30303030303030304,0.1950354609929078,0.511177347242921,0.0533731853116994,0.2,1.0
-0.17647058823529413,0.39195979899497485,0.5737704918032787,0.0,0.0,0.4843517138599106,0.08198121263877028,0.3,0.0
-0.11764705882352941,0.6532663316582915,0.7868852459016393,0.0,0.0,0.33681073025335323,0.08112724167378309,0.0,0.0
-0.17647058823529413,0.5577889447236181,0.47540983606557374,0.31313131313131315,0.05200945626477541,0.4396423248882266,0.1502988898377455,0.016666666666666666,0.0
-0.11764705882352941,0.49246231155778897,0.4918032786885246,0.1717171717171717,0.14184397163120568,0.5171385991058123,0.05123825789923143,0.016666666666666666,0.0
-0.058823529411764705,0.7185929648241206,0.7049180327868853,0.30303030303030304,0.3900709219858156,0.4485842026825634,0.3475661827497865,0.03333333333333333,0.0
-0.058823529411764705,0.5979899497487438,0.36065573770491804,0.47474747474747475,0.07446808510638298,0.5290611028315947,0.08625106746370624,0.06666666666666667,0.0
-0.35294117647058826,0.542713567839196,0.36065573770491804,0.20202020202020202,0.1536643026004728,0.35767511177347244,0.31383432963279245,0.23333333333333334,0.0
-0.11764705882352941,0.592964824120603,0.6557377049180327,0.0,0.0,0.639344262295082,0.26259607173356103,0.0,1.0
-0.5882352941176471,0.6683417085427136,0.5573770491803278,0.0,0.0,0.4023845007451565,0.07130657557643039,0.25,0.0
-0.11764705882352941,0.9899497487437185,0.5737704918032787,1.0,0.0,0.5171385991058123,0.2122117847993168,0.6833333333333333,1.0
-0.0,0.7587939698492462,0.7377049180327869,0.46464646464646464,0.0,0.6274217585692996,0.12510674637062338,0.0,1.0
-0.35294117647058826,0.5477386934673367,0.4918032786885246,0.2727272727272727,0.0,0.37257824143070045,0.05465414175918019,0.1,0.0
-0.7058823529411765,0.6080402010050251,0.639344262295082,0.1717171717171717,0.0,0.3949329359165425,0.07728437233134072,0.6833333333333333,0.0
-0.47058823529411764,0.5025125628140703,0.6229508196721312,0.0,0.0,0.5767511177347244,0.04782237403928266,0.35,0.0
-0.47058823529411764,0.6231155778894473,0.6229508196721312,0.24242424242424243,0.7092198581560284,0.4277198211624441,0.26003415883859954,0.5166666666666667,1.0
-0.058823529411764705,0.46733668341708545,0.45901639344262296,0.1111111111111111,0.0,0.33532041728763046,0.14474807856532876,0.016666666666666666,0.0
-0.47058823529411764,0.7185929648241206,0.5409836065573771,0.0,0.0,0.5201192250372578,0.021776259607173356,0.3333333333333333,1.0
-0.35294117647058826,0.5175879396984925,0.5409836065573771,0.0,0.0,0.3621460506706409,0.07301451750640478,0.13333333333333333,0.0
-0.17647058823529413,0.8844221105527639,0.7049180327868853,0.2727272727272727,0.18439716312056736,0.496274217585693,0.45943637916310837,0.5166666666666667,1.0
-0.0,0.36683417085427134,0.0,0.0,0.0,0.31445603576751124,0.11272416737830913,0.06666666666666667,0.0
-0.6470588235294118,0.5577889447236181,0.6885245901639344,0.40404040404040403,0.0,0.6974664679582713,0.3616567036720752,0.4,1.0
-0.11764705882352941,0.5628140703517588,0.639344262295082,0.5050505050505051,0.16548463356973994,0.587183308494784,0.04141759180187873,0.05,0.0
-0.17647058823529413,0.6633165829145728,0.6557377049180327,0.0,0.0,0.5126676602086438,0.13834329632792486,0.38333333333333336,1.0
-0.11764705882352941,0.4120603015075377,0.4262295081967213,0.2222222222222222,0.1359338061465721,0.42473919523099857,0.6921434671221178,0.06666666666666667,0.0
-0.35294117647058826,0.6180904522613065,0.5901639344262295,0.45454545454545453,0.2718676122931442,0.5007451564828614,0.27967549103330486,0.21666666666666667,0.0
-0.0,0.9447236180904522,0.6721311475409836,0.1414141414141414,0.2186761229314421,0.4769001490312966,0.2578992314261315,0.016666666666666666,1.0
-0.0,0.33668341708542715,0.6229508196721312,0.0,0.0,0.6751117734724292,0.04953031596925705,0.4166666666666667,0.0
-0.058823529411764705,0.4472361809045226,0.19672131147540983,0.1919191919191919,0.02955082742316785,0.41430700447093893,0.2053800170794193,0.0,0.0
-0.058823529411764705,0.8693467336683417,0.6065573770491803,0.0,0.0,0.5484351713859911,0.00426985482493595,0.2833333333333333,1.0
-0.058823529411764705,0.5477386934673367,0.3114754098360656,0.18181818181818182,0.14184397163120568,0.34426229508196726,0.14047822374039282,0.08333333333333333,0.0
-0.058823529411764705,0.542713567839196,0.7213114754098361,0.1919191919191919,0.0,0.40387481371087935,0.13748932536293765,0.05,0.0
-0.35294117647058826,0.4824120603015075,0.0,0.0,0.0,0.35320417287630407,0.04782237403928266,0.11666666666666667,0.0
-0.058823529411764705,0.6231155778894473,0.6065573770491803,0.36363636363636365,0.0,0.41430700447093893,0.009393680614859097,0.15,0.0
-0.4117647058823529,0.7537688442211056,0.639344262295082,0.29292929292929293,0.14893617021276595,0.5245901639344264,0.26216908625106744,0.55,1.0
-0.23529411764705882,0.9195979899497487,0.0,0.0,0.0,0.42324888226527574,0.057216054654141764,0.25,1.0
-0.058823529411764705,0.6231155778894473,0.4918032786885246,0.32323232323232326,0.0,0.533532041728763,0.18616567036720752,0.0,0.0
-0.058823529411764705,0.9095477386934674,0.639344262295082,0.42424242424242425,0.3463356973995272,0.5961251862891208,0.5038428693424423,0.016666666666666666,1.0
-0.058823529411764705,0.4623115577889447,0.5081967213114754,0.25252525252525254,0.04846335697399527,0.2906110283159464,0.17250213492741245,0.06666666666666667,0.0
-0.0,0.7638190954773869,0.6721311475409836,0.3939393939393939,0.3215130023640662,0.6184798807749627,0.08198121263877028,0.1,0.0
-0.058823529411764705,0.5577889447236181,0.5081967213114754,0.13131313131313133,0.21513002364066194,0.35767511177347244,0.025619128949615717,0.03333333333333333,0.0
-0.17647058823529413,0.5326633165829145,0.4426229508196721,0.21212121212121213,0.1867612293144208,0.4605067064083458,0.09137489325362937,0.05,0.0
-0.17647058823529413,0.8743718592964824,0.47540983606557374,0.2222222222222222,0.2293144208037825,0.4903129657228018,0.21989752348420152,0.25,1.0
-0.4117647058823529,0.8442211055276382,0.7213114754098361,0.42424242424242425,0.37943262411347517,0.5692995529061103,0.302732707087959,0.31666666666666665,1.0
-0.35294117647058826,0.5276381909547738,0.6557377049180327,0.2828282828282828,0.0,0.4843517138599106,0.3415883859948762,0.08333333333333333,0.0
-0.6470588235294118,0.6934673366834171,0.6065573770491803,0.26262626262626265,0.1702127659574468,0.5380029806259315,0.20452604611443212,0.48333333333333334,1.0
-0.17647058823529413,0.5326633165829145,0.5901639344262295,0.0,0.0,0.3845007451564829,0.055081127241673786,0.1,0.0
-0.35294117647058826,0.5879396984924623,0.7868852459016393,0.0,0.0,0.4277198211624441,0.033731853116994025,0.15,0.0
-0.11764705882352941,0.3417085427135678,0.5081967213114754,0.13131313131313133,0.01773049645390071,0.2995529061102832,0.07643040136635354,0.03333333333333333,0.0
-0.5294117647058824,0.5628140703517588,0.6721311475409836,0.24242424242424243,0.0,0.42026825633383014,0.5140905209222886,0.48333333333333334,1.0
-0.0,0.5979899497487438,0.0,0.0,0.0,0.4828614008941878,0.026900085397096492,0.05,1.0
-0.11764705882352941,0.5628140703517588,0.7049180327868853,0.42424242424242425,0.18912529550827423,0.5722801788375559,0.07173356105892399,0.11666666666666667,0.0
-0.11764705882352941,0.4623115577889447,0.6229508196721312,0.20202020202020202,0.0,0.36065573770491804,0.6917164816396242,0.11666666666666667,0.0
-0.35294117647058826,0.9195979899497487,0.7704918032786885,0.0,0.0,0.6080476900149031,0.5905209222886422,0.4,0.0
-0.0,0.4723618090452261,0.5737704918032787,0.2727272727272727,0.1359338061465721,0.6482861400894189,0.11485909479077709,0.0,0.0
-0.11764705882352941,0.542713567839196,0.5245901639344263,0.0,0.0,0.459016393442623,0.034158838599487616,0.0,0.0
-0.23529411764705882,0.45226130653266333,0.7213114754098361,0.47474747474747475,0.06382978723404255,0.5618479880774964,0.12126387702818103,0.13333333333333333,0.0
-0.0,0.628140703517588,0.5573770491803278,0.0,0.0,0.3681073025335321,0.05465414175918019,0.0,0.0
-0.0,0.6633165829145728,0.639344262295082,0.0,0.0,0.4828614008941878,0.13450042698548248,0.0,0.0
-0.29411764705882354,0.6432160804020101,0.6557377049180327,0.0,0.0,0.5156482861400895,0.02818104184457728,0.4,0.0
-0.23529411764705882,0.4723618090452261,0.5327868852459017,0.2222222222222222,0.0,0.3681073025335321,0.02988898377455166,0.0,0.0
-0.4117647058823529,0.5728643216080402,0.5245901639344263,0.0,0.0,0.40834575260804773,0.27924850555081127,0.21666666666666667,1.0
-0.0,0.5125628140703518,0.639344262295082,0.40404040404040403,0.10638297872340426,0.5141579731743666,0.06831767719897522,0.05,0.0
-0.11764705882352941,0.5577889447236181,0.4918032786885246,0.0,0.0,0.3904619970193741,0.11315115286080274,0.03333333333333333,0.0
-0.058823529411764705,0.6432160804020101,0.6721311475409836,0.1717171717171717,0.21631205673758866,0.4098360655737705,0.015798462852263023,0.016666666666666666,0.0
-0.5882352941176471,0.4623115577889447,0.5081967213114754,0.0,0.0,0.3859910581222057,0.038001707941929974,0.16666666666666666,0.0
-0.7647058823529411,0.5226130653266332,0.5901639344262295,0.0,0.0,0.46497764530551416,0.16524338172502134,0.2833333333333333,1.0
-0.29411764705882354,0.5226130653266332,0.6065573770491803,0.0,0.0,0.42921013412816694,0.03202391118701964,0.45,0.0
-0.11764705882352941,0.4723618090452261,0.6229508196721312,0.18181818181818182,0.07801418439716312,0.4709388971684054,0.2438087105038429,0.03333333333333333,0.0
-0.4117647058823529,0

<TRUNCATED>