Posted to commits@mahout.apache.org by ss...@apache.org on 2014/05/18 23:03:02 UTC

svn commit: r1595684 [1/2] - in /mahout/trunk: ./ mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/ mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/

Author: ssc
Date: Sun May 18 21:03:02 2014
New Revision: 1595684

URL: http://svn.apache.org/r1595684
Log:
MAHOUT-1388 Add command line support and logging for MLP

Added:
    mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptron.java
    mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptron.java
    mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/Datasets.java
    mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptronTest.java
    mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptronTest.java
Modified:
    mahout/trunk/CHANGELOG
    mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/MultilayerPerceptron.java
    mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/NeuralNetwork.java
    mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestMultilayerPerceptron.java
    mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestNeuralNetwork.java

Modified: mahout/trunk/CHANGELOG
URL: http://svn.apache.org/viewvc/mahout/trunk/CHANGELOG?rev=1595684&r1=1595683&r2=1595684&view=diff
==============================================================================
--- mahout/trunk/CHANGELOG (original)
+++ mahout/trunk/CHANGELOG Sun May 18 21:03:02 2014
@@ -2,6 +2,8 @@ Mahout Change Log
 
 Release 1.0 - unreleased
 
+  MAHOUT-1388: Add command line support and logging for MLP (Yexi Jiang via ssc)
+
   MAHOUT-1498: DistributedCache.setCacheFiles in DictionaryVectorizer overwrites jars pushed using oozie (Sergey via ssc)
 
   MAHOUT-1385: Caching Encoders don't cache (Johannes Schulte, Manoj Awasthi via ssc)

Modified: mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/MultilayerPerceptron.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/MultilayerPerceptron.java?rev=1595684&r1=1595683&r2=1595684&view=diff
==============================================================================
--- mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/MultilayerPerceptron.java (original)
+++ mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/MultilayerPerceptron.java Sun May 18 21:03:02 2014
@@ -21,6 +21,8 @@ import org.apache.mahout.classifier.Onli
 import org.apache.mahout.math.DenseVector;
 import org.apache.mahout.math.Vector;
 
+import java.io.IOException;
+
 /**
  * A Multilayer Perceptron (MLP) is a kind of feed-forward artificial neural
  * network, which is a mathematical model inspired by the biological neural
@@ -54,7 +56,7 @@ public class MultilayerPerceptron extend
    * 
    * @param modelPath The path of the model.
    */
-  public MultilayerPerceptron(String modelPath) {
+  public MultilayerPerceptron(String modelPath) throws IOException {
     super(modelPath);
   }
 

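For reference, loading a persisted model now surfaces I/O failures as a checked
exception instead of a printed stack trace, so callers must handle it. A minimal
caller sketch (the model path and feature values are hypothetical):

    import java.io.IOException;
    import org.apache.mahout.classifier.mlp.MultilayerPerceptron;
    import org.apache.mahout.math.DenseVector;
    import org.apache.mahout.math.Vector;

    try {
      // Load a previously trained model; fails fast if the path is unreadable
      MultilayerPerceptron mlp = new MultilayerPerceptron("/tmp/mlp.model");
      Vector features = new DenseVector(new double[] { 5.1, 3.5, 1.4, 0.2 });
      // getOutput returns one score per output neuron; the index of the
      // maximum is the predicted label
      System.out.println(mlp.getOutput(features).maxValueIndex());
      mlp.close();
    } catch (IOException e) {
      throw new IllegalStateException("Could not load MLP model", e);
    }
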
Modified: mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/NeuralNetwork.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/NeuralNetwork.java?rev=1595684&r1=1595683&r2=1595684&view=diff
==============================================================================
--- mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/NeuralNetwork.java (original)
+++ mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/NeuralNetwork.java Sun May 18 21:03:02 2014
@@ -38,6 +38,8 @@ import org.apache.mahout.math.MatrixWrit
 import org.apache.mahout.math.Vector;
 import org.apache.mahout.math.function.DoubleDoubleFunction;
 import org.apache.mahout.math.function.DoubleFunction;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -49,22 +51,22 @@ import com.google.common.io.Closeables;
  * and Autoencoder consist of neurons and the weights between neurons.
  */
 public abstract class NeuralNetwork {
+  
+  private static final Logger log = LoggerFactory.getLogger(NeuralNetwork.class);
 
   /* The default learning rate */
-  private static final double DEFAULT_LEARNING_RATE = 0.5;
+  public static final double DEFAULT_LEARNING_RATE = 0.5;
   /* The default regularization weight */
-  private static final double DEFAULT_REGULARIZATION_WEIGHT = 0;
+  public static final double DEFAULT_REGULARIZATION_WEIGHT = 0;
   /* The default momentum weight */
-  private static final double DEFAULT_MOMENTUM_WEIGHT = 0.1;
+  public static final double DEFAULT_MOMENTUM_WEIGHT = 0.1;
 
-  public static enum TrainingMethod {
-    GRADIENT_DESCENT
-  }
+  public static enum TrainingMethod { GRADIENT_DESCENT }
 
-  /* the name of the model */
+  /* The name of the model */
   protected String modelType;
 
-  /* the path to store the model */
+  /* The path to store the model */
   protected String modelPath;
 
   protected double learningRate;
@@ -94,25 +96,25 @@ public abstract class NeuralNetwork {
   protected List<String> squashingFunctionList;
 
   /* The index of the final layer */
-  protected int finalLayerIdx;
+  protected int finalLayerIndex;
 
   /**
    * The default constructor that initializes the learning rate, regularization
    * weight, and momentum weight by default.
    */
   public NeuralNetwork() {
-    this.learningRate = DEFAULT_LEARNING_RATE;
-    this.regularizationWeight = DEFAULT_REGULARIZATION_WEIGHT;
-    this.momentumWeight = DEFAULT_MOMENTUM_WEIGHT;
-    this.trainingMethod = TrainingMethod.GRADIENT_DESCENT;
-    this.costFunctionName = "Minus_Squared";
-    this.modelType = this.getClass().getSimpleName();
-
-    this.layerSizeList = Lists.newArrayList();
-    this.layerSizeList = Lists.newArrayList();
-    this.weightMatrixList = Lists.newArrayList();
-    this.prevWeightUpdatesList = Lists.newArrayList();
-    this.squashingFunctionList = Lists.newArrayList();
+    log.info("Initialize model...");
+    learningRate = DEFAULT_LEARNING_RATE;
+    regularizationWeight = DEFAULT_REGULARIZATION_WEIGHT;
+    momentumWeight = DEFAULT_MOMENTUM_WEIGHT;
+    trainingMethod = TrainingMethod.GRADIENT_DESCENT;
+    costFunctionName = "Minus_Squared";
+    modelType = getClass().getSimpleName();
+
+    layerSizeList = Lists.newArrayList();
+    weightMatrixList = Lists.newArrayList();
+    prevWeightUpdatesList = Lists.newArrayList();
+    squashingFunctionList = Lists.newArrayList();
   }
 
   /**
@@ -125,9 +128,9 @@ public abstract class NeuralNetwork {
    */
   public NeuralNetwork(double learningRate, double momentumWeight, double regularizationWeight) {
     this();
-    this.setLearningRate(learningRate);
-    this.setMomentumWeight(momentumWeight);
-    this.setRegularizationWeight(regularizationWeight);
+    setLearningRate(learningRate);
+    setMomentumWeight(momentumWeight);
+    setRegularizationWeight(regularizationWeight);
   }
 
   /**
@@ -135,13 +138,9 @@ public abstract class NeuralNetwork {
    * 
    * @param modelPath The location that the model is stored.
    */
-  public NeuralNetwork(String modelPath) {
-    try {
-      this.modelPath = modelPath;
-      this.readFromModel();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+  public NeuralNetwork(String modelPath) throws IOException {
+    this.modelPath = modelPath;
+    readFromModel();
   }
 
   /**
@@ -173,7 +172,7 @@ public abstract class NeuralNetwork {
    * @return The value of learning rate.
    */
   public double getLearningRate() {
-    return this.learningRate;
+    return learningRate;
   }
 
   /**
@@ -196,7 +195,7 @@ public abstract class NeuralNetwork {
    * @return The weight of regularization.
    */
   public double getRegularizationWeight() {
-    return this.regularizationWeight;
+    return regularizationWeight;
   }
 
   /**
@@ -218,7 +217,7 @@ public abstract class NeuralNetwork {
    * @return The value of momentum.
    */
   public double getMomentumWeight() {
-    return this.momentumWeight;
+    return momentumWeight;
   }
 
   /**
@@ -238,7 +237,7 @@ public abstract class NeuralNetwork {
    * @return The training method enumeration.
    */
   public TrainingMethod getTrainingMethod() {
-    return this.trainingMethod;
+    return trainingMethod;
   }
 
   /**
@@ -265,26 +264,25 @@ public abstract class NeuralNetwork {
    */
   public int addLayer(int size, boolean isFinalLayer, String squashingFunctionName) {
     Preconditions.checkArgument(size > 0, "Size of layer must be larger than 0.");
+    log.info("Add layer with size {} and squashing function {}", size, squashingFunctionName);
     int actualSize = size;
     if (!isFinalLayer) {
       actualSize += 1;
     }
 
-    this.layerSizeList.add(actualSize);
-    int layerIdx = this.layerSizeList.size() - 1;
+    layerSizeList.add(actualSize);
+    int layerIndex = layerSizeList.size() - 1;
     if (isFinalLayer) {
-      this.finalLayerIdx = layerIdx;
+      finalLayerIndex = layerIndex;
     }
 
-    // add weights between current layer and previous layer, and input layer has
-    // no squashing function
-    if (layerIdx > 0) {
-      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
-      // row count equals to size of current size and column count equal to
-      // size of previous layer
+    // Add weights between the current layer and the previous layer; the input layer has no squashing function
+    if (layerIndex > 0) {
+      int sizePrevLayer = layerSizeList.get(layerIndex - 1);
+      // Row count equals the size of the current layer, column count equals the size of the previous layer
       int row = isFinalLayer ? actualSize : actualSize - 1;
       Matrix weightMatrix = new DenseMatrix(row, sizePrevLayer);
-      // initialize weights
+      // Initialize weights
       final RandomWrapper rnd = RandomUtils.getRandom();
       weightMatrix.assign(new DoubleFunction() {
         @Override
@@ -292,11 +290,11 @@ public abstract class NeuralNetwork {
           return rnd.nextDouble() - 0.5;
         }
       });
-      this.weightMatrixList.add(weightMatrix);
-      this.prevWeightUpdatesList.add(new DenseMatrix(row, sizePrevLayer));
-      this.squashingFunctionList.add(squashingFunctionName);
+      weightMatrixList.add(weightMatrix);
+      prevWeightUpdatesList.add(new DenseMatrix(row, sizePrevLayer));
+      squashingFunctionList.add(squashingFunctionName);
     }
-    return layerIdx;
+    return layerIndex;
   }
 
   /**
@@ -308,7 +306,7 @@ public abstract class NeuralNetwork {
   public int getLayerSize(int layer) {
     Preconditions.checkArgument(layer >= 0 && layer < this.layerSizeList.size(),
         String.format("Input must be in range [0, %d]\n", this.layerSizeList.size() - 1));
-    return this.layerSizeList.get(layer);
+    return layerSizeList.get(layer);
   }
 
   /**
@@ -317,17 +315,17 @@ public abstract class NeuralNetwork {
    * @return The sizes of the layers.
    */
   protected List<Integer> getLayerSizeList() {
-    return this.layerSizeList;
+    return layerSizeList;
   }
 
   /**
-   * Get the weights between layer layerIdx and layerIdx + 1
+   * Get the weights between layer layerIndex and layerIndex + 1
    * 
-   * @param layerIdx The index of the layer.
+   * @param layerIndex The index of the layer.
    * @return The weights in form of {@link Matrix}.
    */
-  public Matrix getWeightsByLayer(int layerIdx) {
-    return this.weightMatrixList.get(layerIdx);
+  public Matrix getWeightsByLayer(int layerIndex) {
+    return weightMatrixList.get(layerIndex);
   }
 
   /**
@@ -338,8 +336,8 @@ public abstract class NeuralNetwork {
    */
   public void updateWeightMatrices(Matrix[] matrices) {
     for (int i = 0; i < matrices.length; ++i) {
-      Matrix matrix = this.weightMatrixList.get(i);
-      this.weightMatrixList.set(i, matrix.plus(matrices[i]));
+      Matrix matrix = weightMatrixList.get(i);
+      weightMatrixList.set(i, matrix.plus(matrices[i]));
     }
   }
 
@@ -350,8 +348,8 @@ public abstract class NeuralNetwork {
    *          existing matrices.
    */
   public void setWeightMatrices(Matrix[] matrices) {
-    this.weightMatrixList = Lists.newArrayList();
-    Collections.addAll(this.weightMatrixList, matrices);
+    weightMatrixList = Lists.newArrayList();
+    Collections.addAll(weightMatrixList, matrices);
   }
 
   /**
@@ -361,9 +359,9 @@ public abstract class NeuralNetwork {
    * @param matrix The instance of {@link Matrix}.
    */
   public void setWeightMatrix(int index, Matrix matrix) {
-    Preconditions.checkArgument(0 <= index && index < this.weightMatrixList.size(),
-        String.format("index [%s] should be in range [%s, %s).", index, 0, this.weightMatrixList.size()));
-    this.weightMatrixList.set(index, matrix);
+    Preconditions.checkArgument(0 <= index && index < weightMatrixList.size(),
+        String.format("index [%s] should be in range [%s, %s).", index, 0, weightMatrixList.size()));
+    weightMatrixList.set(index, matrix);
   }
 
   /**
@@ -372,8 +370,8 @@ public abstract class NeuralNetwork {
    * @return The weight matrices.
    */
   public Matrix[] getWeightMatrices() {
-    Matrix[] matrices = new Matrix[this.weightMatrixList.size()];
-    this.weightMatrixList.toArray(matrices);
+    Matrix[] matrices = new Matrix[weightMatrixList.size()];
+    weightMatrixList.toArray(matrices);
     return matrices;
   }
 
@@ -384,9 +382,9 @@ public abstract class NeuralNetwork {
    * @return The output vector.
    */
   public Vector getOutput(Vector instance) {
-    Preconditions.checkArgument(this.layerSizeList.get(0) == instance.size() + 1,
+    Preconditions.checkArgument(layerSizeList.get(0) == instance.size() + 1,
         String.format("The dimension of input instance should be %d, but the input has dimension %d.",
-            this.layerSizeList.get(0) - 1, instance.size()));
+            layerSizeList.get(0) - 1, instance.size()));
 
     // add bias feature
     Vector instanceWithBias = new DenseVector(instance.size() + 1);
@@ -416,7 +414,7 @@ public abstract class NeuralNetwork {
     Vector intermediateOutput = instance;
     outputCache.add(intermediateOutput);
 
-    for (int i = 0; i < this.layerSizeList.size() - 1; ++i) {
+    for (int i = 0; i < layerSizeList.size() - 1; ++i) {
       intermediateOutput = forward(i, intermediateOutput);
       outputCache.add(intermediateOutput);
     }
@@ -431,10 +429,10 @@ public abstract class NeuralNetwork {
    * @return The intermediate results of the current layer.
    */
   protected Vector forward(int fromLayer, Vector intermediateOutput) {
-    Matrix weightMatrix = this.weightMatrixList.get(fromLayer);
+    Matrix weightMatrix = weightMatrixList.get(fromLayer);
 
     Vector vec = weightMatrix.times(intermediateOutput);
-    vec = vec.assign(NeuralNetworkFunctions.getDoubleFunction(this.squashingFunctionList.get(fromLayer)));
+    vec = vec.assign(NeuralNetworkFunctions.getDoubleFunction(squashingFunctionList.get(fromLayer)));
 
     // add bias
     Vector vecWithBias = new DenseVector(vec.size() + 1);
@@ -453,8 +451,8 @@ public abstract class NeuralNetwork {
    *          of the output layer (a.k.a. the dimension of the labels).
    */
   public void trainOnline(Vector trainingInstance) {
-    Matrix[] matrices = this.trainByInstance(trainingInstance);
-    this.updateWeightMatrices(matrices);
+    Matrix[] matrices = trainByInstance(trainingInstance);
+    updateWeightMatrices(matrices);
   }
 
   /**
@@ -467,16 +465,16 @@ public abstract class NeuralNetwork {
    */
   public Matrix[] trainByInstance(Vector trainingInstance) {
     // validate training instance
-    int inputDimension = this.layerSizeList.get(0) - 1;
-    int outputDimension = this.layerSizeList.get(this.layerSizeList.size() - 1);
+    int inputDimension = layerSizeList.get(0) - 1;
+    int outputDimension = layerSizeList.get(layerSizeList.size() - 1);
     Preconditions.checkArgument(inputDimension + outputDimension == trainingInstance.size(),
         String.format("The dimension of training instance is %d, but requires %d.", trainingInstance.size(),
             inputDimension + outputDimension));
 
-    if (this.trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
-      return this.trainByInstanceGradientDescent(trainingInstance);
+    if (trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
+      return trainByInstanceGradientDescent(trainingInstance);
     }
-    throw new IllegalArgumentException(String.format("Training method is not supported."));
+    throw new IllegalArgumentException("Training method is not supported.");
   }
 
   /**
@@ -489,48 +487,51 @@ public abstract class NeuralNetwork {
    * @return The weight update matrices.
    */
   private Matrix[] trainByInstanceGradientDescent(Vector trainingInstance) {
-    int inputDimension = this.layerSizeList.get(0) - 1;
+    int inputDimension = layerSizeList.get(0) - 1;
 
-    Vector inputInstance = new DenseVector(this.layerSizeList.get(0));
+    Vector inputInstance = new DenseVector(layerSizeList.get(0));
     inputInstance.set(0, 1); // add bias
     for (int i = 0; i < inputDimension; ++i) {
       inputInstance.set(i + 1, trainingInstance.get(i));
     }
 
-    Vector labels = trainingInstance.viewPart(inputInstance.size() - 1, trainingInstance.size() - inputInstance.size() + 1);
+    Vector labels =
+        trainingInstance.viewPart(inputInstance.size() - 1, trainingInstance.size() - inputInstance.size() + 1);
 
     // initialize weight update matrices
-    Matrix[] weightUpdateMatrices = new Matrix[this.weightMatrixList.size()];
+    Matrix[] weightUpdateMatrices = new Matrix[weightMatrixList.size()];
     for (int m = 0; m < weightUpdateMatrices.length; ++m) {
-      weightUpdateMatrices[m] = new DenseMatrix(this.weightMatrixList.get(m).rowSize(), this.weightMatrixList.get(m).columnSize());
+      weightUpdateMatrices[m] =
+          new DenseMatrix(weightMatrixList.get(m).rowSize(), weightMatrixList.get(m).columnSize());
     }
 
-    List<Vector> internalResults = this.getOutputInternal(inputInstance);
+    List<Vector> internalResults = getOutputInternal(inputInstance);
 
-    Vector deltaVec = new DenseVector(this.layerSizeList.get(this.layerSizeList.size() - 1));
+    Vector deltaVec = new DenseVector(layerSizeList.get(layerSizeList.size() - 1));
     Vector output = internalResults.get(internalResults.size() - 1);
 
     final DoubleFunction derivativeSquashingFunction =
-        NeuralNetworkFunctions.getDerivativeDoubleFunction(this.squashingFunctionList.get(this.squashingFunctionList.size() - 1));
+        NeuralNetworkFunctions.getDerivativeDoubleFunction(squashingFunctionList.get(squashingFunctionList.size() - 1));
 
-    final DoubleDoubleFunction costFunction = NeuralNetworkFunctions.getDerivativeDoubleDoubleFunction(this.costFunctionName);
+    final DoubleDoubleFunction costFunction =
+        NeuralNetworkFunctions.getDerivativeDoubleDoubleFunction(costFunctionName);
 
-    Matrix lastWeightMatrix = this.weightMatrixList.get(this.weightMatrixList.size() - 1);
+    Matrix lastWeightMatrix = weightMatrixList.get(weightMatrixList.size() - 1);
 
     for (int i = 0; i < deltaVec.size(); ++i) {
       double costFuncDerivative = costFunction.apply(labels.get(i), output.get(i + 1));
-      // add regularization
-      costFuncDerivative += this.regularizationWeight * lastWeightMatrix.viewRow(i).zSum();
+      // Add regularization
+      costFuncDerivative += regularizationWeight * lastWeightMatrix.viewRow(i).zSum();
       deltaVec.set(i, costFuncDerivative);
       deltaVec.set(i, deltaVec.get(i) * derivativeSquashingFunction.apply(output.get(i + 1)));
     }
 
-    // start from previous layer of output layer
-    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
+    // Start from previous layer of output layer
+    for (int layer = layerSizeList.size() - 2; layer >= 0; --layer) {
       deltaVec = backPropagate(layer, deltaVec, internalResults, weightUpdateMatrices[layer]);
     }
 
-    this.prevWeightUpdatesList = Arrays.asList(weightUpdateMatrices);
+    prevWeightUpdatesList = Arrays.asList(weightUpdateMatrices);
 
     return weightUpdateMatrices;
   }
@@ -540,24 +541,24 @@ public abstract class NeuralNetwork {
    * updated information will be stored in the weightUpdateMatrices, and the
    * delta of the prevLayer will be returned.
    * 
-   * @param curLayerIdx Index of current layer.
+   * @param currentLayerIndex Index of current layer.
    * @param nextLayerDelta Delta of next layer.
    * @param outputCache The output cache to store intermediate results.
    * @param weightUpdateMatrix  The weight update, in form of {@link Matrix}.
    * @return The weight updates.
    */
-  private Vector backPropagate(int curLayerIdx, Vector nextLayerDelta,
+  private Vector backPropagate(int currentLayerIndex, Vector nextLayerDelta,
                                List<Vector> outputCache, Matrix weightUpdateMatrix) {
 
-    // get layer related information
+    // Get layer related information
     final DoubleFunction derivativeSquashingFunction =
-        NeuralNetworkFunctions.getDerivativeDoubleFunction(this.squashingFunctionList.get(curLayerIdx));
-    Vector curLayerOutput = outputCache.get(curLayerIdx);
-    Matrix weightMatrix = this.weightMatrixList.get(curLayerIdx);
-    Matrix prevWeightMatrix = this.prevWeightUpdatesList.get(curLayerIdx);
+        NeuralNetworkFunctions.getDerivativeDoubleFunction(squashingFunctionList.get(currentLayerIndex));
+    Vector curLayerOutput = outputCache.get(currentLayerIndex);
+    Matrix weightMatrix = weightMatrixList.get(currentLayerIndex);
+    Matrix prevWeightMatrix = prevWeightUpdatesList.get(currentLayerIndex);
 
-    // next layer is not output layer, remove the delta of bias neuron
-    if (curLayerIdx != this.layerSizeList.size() - 2) {
+    // Next layer is not output layer, remove the delta of bias neuron
+    if (currentLayerIndex != layerSizeList.size() - 2) {
       nextLayerDelta = nextLayerDelta.viewPart(1, nextLayerDelta.size() - 1);
     }
 
@@ -570,7 +571,7 @@ public abstract class NeuralNetwork {
       }
     });
 
-    // update weights
+    // Update weights
     for (int i = 0; i < weightUpdateMatrix.rowSize(); ++i) {
       for (int j = 0; j < weightUpdateMatrix.columnSize(); ++j) {
         weightUpdateMatrix.set(i, j, -learningRate * nextLayerDelta.get(i) *
@@ -587,13 +588,14 @@ public abstract class NeuralNetwork {
    * @throws IOException
    */
   protected void readFromModel() throws IOException {
-    Preconditions.checkArgument(this.modelPath != null, "Model path has not been set.");
+    log.info("Load model from {}", modelPath);
+    Preconditions.checkArgument(modelPath != null, "Model path has not been set.");
     FSDataInputStream is = null;
     try {
-      Path path = new Path(this.modelPath);
+      Path path = new Path(modelPath);
       FileSystem fs = path.getFileSystem(new Configuration());
       is = new FSDataInputStream(fs.open(path));
-      this.readFields(is);
+      readFields(is);
     } finally {
       Closeables.close(is, true);
     }
@@ -605,13 +607,14 @@ public abstract class NeuralNetwork {
    * @throws IOException
    */
   public void writeModelToFile() throws IOException {
-    Preconditions.checkArgument(this.modelPath != null, "Model path has not been set.");
+    log.info("Write model to {}.", modelPath);
+    Preconditions.checkArgument(modelPath != null, "Model path has not been set.");
     FSDataOutputStream stream = null;
     try {
-      Path path = new Path(this.modelPath);
+      Path path = new Path(modelPath);
       FileSystem fs = path.getFileSystem(new Configuration());
       stream = fs.create(path, true);
-      this.write(stream);
+      write(stream);
     } finally {
       Closeables.close(stream, false);
     }
@@ -632,7 +635,7 @@ public abstract class NeuralNetwork {
    * @return The path of the model.
    */
   public String getModelPath() {
-    return this.modelPath;
+    return modelPath;
   }
 
   /**
@@ -642,42 +645,42 @@ public abstract class NeuralNetwork {
    * @throws IOException
    */
   public void write(DataOutput output) throws IOException {
-    // write model type
+    // Write model type
     WritableUtils.writeString(output, modelType);
-    // write learning rate
+    // Write learning rate
     output.writeDouble(learningRate);
-    // write model path
-    if (this.modelPath != null) {
+    // Write model path
+    if (modelPath != null) {
       WritableUtils.writeString(output, modelPath);
     } else {
       WritableUtils.writeString(output, "null");
     }
 
-    // write regularization weight
-    output.writeDouble(this.regularizationWeight);
-    // write momentum weight
-    output.writeDouble(this.momentumWeight);
-
-    // write cost function
-    WritableUtils.writeString(output, this.costFunctionName);
-
-    // write layer size list
-    output.writeInt(this.layerSizeList.size());
-    for (Integer aLayerSizeList : this.layerSizeList) {
+    // Write regularization weight
+    output.writeDouble(regularizationWeight);
+    // Write momentum weight
+    output.writeDouble(momentumWeight);
+
+    // Write cost function
+    WritableUtils.writeString(output, costFunctionName);
+
+    // Write layer size list
+    output.writeInt(layerSizeList.size());
+    for (Integer aLayerSizeList : layerSizeList) {
       output.writeInt(aLayerSizeList);
     }
 
-    WritableUtils.writeEnum(output, this.trainingMethod);
+    WritableUtils.writeEnum(output, trainingMethod);
 
-    // write squashing functions
-    output.writeInt(this.squashingFunctionList.size());
-    for (String aSquashingFunctionList : this.squashingFunctionList) {
+    // Write squashing functions
+    output.writeInt(squashingFunctionList.size());
+    for (String aSquashingFunctionList : squashingFunctionList) {
       WritableUtils.writeString(output, aSquashingFunctionList);
     }
 
-    // write weight matrices
+    // Write weight matrices
     output.writeInt(this.weightMatrixList.size());
-    for (Matrix aWeightMatrixList : this.weightMatrixList) {
+    for (Matrix aWeightMatrixList : weightMatrixList) {
       MatrixWritable.writeMatrix(output, aWeightMatrixList);
     }
   }
@@ -689,51 +692,51 @@ public abstract class NeuralNetwork {
    * @throws IOException
    */
   public void readFields(DataInput input) throws IOException {
-    // read model type
-    this.modelType = WritableUtils.readString(input);
-    if (!this.modelType.equals(this.getClass().getSimpleName())) {
+    // Read model type
+    modelType = WritableUtils.readString(input);
+    if (!modelType.equals(this.getClass().getSimpleName())) {
       throw new IllegalArgumentException("The specified location does not contains the valid NeuralNetwork model.");
     }
-    // read learning rate
-    this.learningRate = input.readDouble();
-    // read model path
-    this.modelPath = WritableUtils.readString(input);
-    if (this.modelPath.equals("null")) {
-      this.modelPath = null;
-    }
-
-    // read regularization weight
-    this.regularizationWeight = input.readDouble();
-    // read momentum weight
-    this.momentumWeight = input.readDouble();
+    // Read learning rate
+    learningRate = input.readDouble();
+    // Read model path
+    modelPath = WritableUtils.readString(input);
+    if (modelPath.equals("null")) {
+      modelPath = null;
+    }
+
+    // Read regularization weight
+    regularizationWeight = input.readDouble();
+    // Read momentum weight
+    momentumWeight = input.readDouble();
 
-    // read cost function
-    this.costFunctionName = WritableUtils.readString(input);
+    // Read cost function
+    costFunctionName = WritableUtils.readString(input);
 
-    // read layer size list
+    // Read layer size list
     int numLayers = input.readInt();
-    this.layerSizeList = Lists.newArrayList();
+    layerSizeList = Lists.newArrayList();
     for (int i = 0; i < numLayers; i++) {
-      this.layerSizeList.add(input.readInt());
+      layerSizeList.add(input.readInt());
     }
 
-    this.trainingMethod = WritableUtils.readEnum(input, TrainingMethod.class);
+    trainingMethod = WritableUtils.readEnum(input, TrainingMethod.class);
 
-    // read squash functions
+    // Read squash functions
     int squashingFunctionSize = input.readInt();
-    this.squashingFunctionList = Lists.newArrayList();
+    squashingFunctionList = Lists.newArrayList();
     for (int i = 0; i < squashingFunctionSize; i++) {
-      this.squashingFunctionList.add(WritableUtils.readString(input));
+      squashingFunctionList.add(WritableUtils.readString(input));
     }
 
-    // read weights and construct matrices of previous updates
+    // Read weights and construct matrices of previous updates
     int numOfMatrices = input.readInt();
-    this.weightMatrixList = Lists.newArrayList();
-    this.prevWeightUpdatesList = Lists.newArrayList();
+    weightMatrixList = Lists.newArrayList();
+    prevWeightUpdatesList = Lists.newArrayList();
     for (int i = 0; i < numOfMatrices; i++) {
       Matrix matrix = MatrixWritable.readMatrix(input);
-      this.weightMatrixList.add(matrix);
-      this.prevWeightUpdatesList.add(new DenseMatrix(matrix.rowSize(), matrix.columnSize()));
+      weightMatrixList.add(matrix);
+      prevWeightUpdatesList.add(new DenseMatrix(matrix.rowSize(), matrix.columnSize()));
     }
   }
 

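The refactored NeuralNetwork API above can also be driven programmatically, as
the new command line tools do. A minimal training sketch, assuming the "Sigmoid"
squashing function (the default used by the train tool) and a made-up XOR-style
instance:

    import org.apache.mahout.classifier.mlp.MultilayerPerceptron;
    import org.apache.mahout.classifier.mlp.NeuralNetwork;
    import org.apache.mahout.math.DenseVector;
    import org.apache.mahout.math.Vector;

    // Topology: 2 inputs, one hidden layer with 4 neurons, 1 output neuron
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.addLayer(2, false, "Sigmoid"); // input layer, bias neuron added internally
    mlp.addLayer(4, false, "Sigmoid"); // hidden layer
    mlp.addLayer(1, true, "Sigmoid");  // final layer, no bias neuron
    mlp.setLearningRate(NeuralNetwork.DEFAULT_LEARNING_RATE)
       .setMomentumWeight(NeuralNetwork.DEFAULT_MOMENTUM_WEIGHT)
       .setRegularizationWeight(NeuralNetwork.DEFAULT_REGULARIZATION_WEIGHT);

    // A training instance concatenates features and labels: XOR of (1, 0) is 1
    Vector instance = new DenseVector(new double[] { 1, 0, 1 });
    mlp.trainOnline(instance);
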
Added: mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptron.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptron.java?rev=1595684&view=auto
==============================================================================
--- mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptron.java (added)
+++ mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptron.java Sun May 18 21:03:02 2014
@@ -0,0 +1,227 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mahout.classifier.mlp;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.cli2.CommandLine;
+import org.apache.commons.cli2.Group;
+import org.apache.commons.cli2.Option;
+import org.apache.commons.cli2.builder.ArgumentBuilder;
+import org.apache.commons.cli2.builder.DefaultOptionBuilder;
+import org.apache.commons.cli2.builder.GroupBuilder;
+import org.apache.commons.cli2.commandline.Parser;
+import org.apache.commons.csv.CSVUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.mahout.math.DenseVector;
+import org.apache.mahout.math.Vector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.io.Closeables;
+
+/** Run {@link MultilayerPerceptron} classification. */
+public class RunMultilayerPerceptron {
+
+  private static final Logger log = LoggerFactory.getLogger(RunMultilayerPerceptron.class);
+
+  static class Parameters {
+    String inputFilePathStr;
+    String inputFileFormat;
+    String modelFilePathStr;
+    String outputFilePathStr;
+    int columnStart;
+    int columnEnd;
+    boolean skipHeader;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    
+    Parameters parameters = new Parameters();
+    
+    if (parseArgs(args, parameters)) {
+      log.info("Load model from {}.", parameters.modelFilePathStr);
+      MultilayerPerceptron mlp = new MultilayerPerceptron(parameters.modelFilePathStr);
+
+      log.info("Topology of MLP: {}.", Arrays.toString(mlp.getLayerSizeList().toArray()));
+
+      // validate the data
+      log.info("Read the data...");
+      Path inputFilePath = new Path(parameters.inputFilePathStr);
+      FileSystem inputFS = inputFilePath.getFileSystem(new Configuration());
+      if (!inputFS.exists(inputFilePath)) {
+        log.error("Input file '{}' does not exists!", parameters.inputFilePathStr);
+        mlp.close();
+        return;
+      }
+
+      Path outputFilePath = new Path(parameters.outputFilePathStr);
+      FileSystem outputFS = outputFilePath.getFileSystem(new Configuration());
+      if (outputFS.exists(outputFilePath)) {
+        log.error("Output file '{}' already exists!", parameters.outputFilePathStr);
+        mlp.close();
+        return;
+      }
+
+      if (!parameters.inputFileFormat.equals("csv")) {
+        log.error("Currently only supports for csv format.");
+        mlp.close();
+        return; // current only supports csv format
+      }
+
+      log.info("Read from column {} to column {}.", parameters.columnStart, parameters.columnEnd);
+
+      BufferedWriter writer = null;
+      BufferedReader reader = null;
+
+      try {
+        writer = new BufferedWriter(new OutputStreamWriter(outputFS.create(outputFilePath)));
+        reader = new BufferedReader(new InputStreamReader(inputFS.open(inputFilePath)));
+        
+        String line;
+
+        if (parameters.skipHeader) {
+          reader.readLine();
+        }
+
+        while ((line = reader.readLine()) != null) {
+          String[] tokens = CSVUtils.parseLine(line);
+          double[] features = new double[Math.min(parameters.columnEnd + 1, tokens.length) - parameters.columnStart];
+
+          for (int i = parameters.columnStart, j = 0; i < Math.min(parameters.columnEnd + 1, tokens.length); ++i, ++j) {
+            features[j] = Double.parseDouble(tokens[i]);
+          }
+          Vector featureVec = new DenseVector(features);
+          Vector res = mlp.getOutput(featureVec);
+          int mostProbableLabelIndex = res.maxValueIndex();
+          writer.write(mostProbableLabelIndex + "\n");
+        }
+        mlp.close();
+        log.info("Labeling finished.");
+      } finally {
+        Closeables.close(reader, true);
+        Closeables.close(writer, true);
+      }
+    }
+  }
+
+  /**
+   * Parse the arguments.
+   *
+   * @param args The input arguments.
+   * @param parameters The parameters to be filled.
+   * @return Whether the input arguments are valid.
+   * @throws Exception
+   */
+  private static boolean parseArgs(String[] args, Parameters parameters) throws Exception {
+    // build the options
+    log.info("Validate and parse arguments...");
+    DefaultOptionBuilder optionBuilder = new DefaultOptionBuilder();
+    GroupBuilder groupBuilder = new GroupBuilder();
+    ArgumentBuilder argumentBuilder = new ArgumentBuilder();
+
+    Option inputFileFormatOption = optionBuilder
+        .withLongName("format")
+        .withShortName("f")
+        .withArgument(argumentBuilder.withName("file type").withDefault("csv").withMinimum(1).withMaximum(1).create())
+        .withDescription("type of input file, currently support 'csv'")
+        .create();
+
+    List<Integer> columnRangeDefault = Lists.newArrayList();
+    columnRangeDefault.add(0);
+    columnRangeDefault.add(Integer.MAX_VALUE);
+
+    Option skipHeaderOption = optionBuilder.withLongName("skipHeader")
+        .withShortName("sh").withRequired(false)
+        .withDescription("whether to skip the first row of the input file")
+        .create();
+
+    Option inputColumnRangeOption = optionBuilder
+        .withLongName("columnRange")
+        .withShortName("cr")
+        .withDescription("the column range of the input file, start from 0")
+        .withArgument(argumentBuilder.withName("range").withMinimum(2).withMaximum(2)
+            .withDefaults(columnRangeDefault).create()).create();
+
+    Group inputFileTypeGroup = groupBuilder.withOption(skipHeaderOption)
+        .withOption(inputColumnRangeOption).withOption(inputFileFormatOption)
+        .create();
+
+    Option inputOption = optionBuilder
+        .withLongName("input")
+        .withShortName("i")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withName("file path").withMinimum(1).withMaximum(1).create())
+        .withDescription("the file path of unlabelled dataset")
+        .withChildren(inputFileTypeGroup).create();
+
+    Option modelOption = optionBuilder
+        .withLongName("model")
+        .withShortName("mo")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withName("model file").withMinimum(1).withMaximum(1).create())
+        .withDescription("the file path of the model").create();
+
+    Option labelsOption = optionBuilder
+        .withLongName("labels")
+        .withShortName("labels")
+        .withArgument(argumentBuilder.withName("label-name").withMinimum(2).create())
+        .withDescription("an ordered list of label names").create();
+
+    Group labelsGroup = groupBuilder.withOption(labelsOption).create();
+
+    Option outputOption = optionBuilder
+        .withLongName("output")
+        .withShortName("o")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withConsumeRemaining("file path").withMinimum(1).withMaximum(1).create())
+        .withDescription("the file path of labelled results").withChildren(labelsGroup).create();
+
+    // parse the input
+    Parser parser = new Parser();
+    Group normalOption = groupBuilder.withOption(inputOption).withOption(modelOption).withOption(outputOption).create();
+    parser.setGroup(normalOption);
+    CommandLine commandLine = parser.parseAndHelp(args);
+    if (commandLine == null) {
+      return false;
+    }
+
+    // obtain the arguments
+    parameters.inputFilePathStr = TrainMultilayerPerceptron.getString(commandLine, inputOption);
+    parameters.inputFileFormat = TrainMultilayerPerceptron.getString(commandLine, inputFileFormatOption);
+    parameters.skipHeader = commandLine.hasOption(skipHeaderOption);
+    parameters.modelFilePathStr = TrainMultilayerPerceptron.getString(commandLine, modelOption);
+    parameters.outputFilePathStr = TrainMultilayerPerceptron.getString(commandLine, outputOption);
+
+    List<?> columnRange = commandLine.getValues(inputColumnRangeOption);
+    parameters.columnStart = Integer.parseInt(columnRange.get(0).toString());
+    parameters.columnEnd = Integer.parseInt(columnRange.get(1).toString());
+
+    return true;
+  }
+
+}

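Based on the options defined above, a classification run would look roughly
like the following; the invocation through the mahout launcher and all paths
are hypothetical:

    bin/mahout org.apache.mahout.classifier.mlp.RunMultilayerPerceptron \
        --input /path/to/unlabelled.csv --format csv --skipHeader \
        --columnRange 0 3 \
        --model /path/to/mlp.model \
        --output /path/to/labels.out

The tool reads the feature columns in the given range from each CSV row, feeds
them through the loaded model, and writes the index of the most probable label
for each row to the output file.
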
Added: mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptron.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptron.java?rev=1595684&view=auto
==============================================================================
--- mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptron.java (added)
+++ mahout/trunk/mrlegacy/src/main/java/org/apache/mahout/classifier/mlp/TrainMultilayerPerceptron.java Sun May 18 21:03:02 2014
@@ -0,0 +1,313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.mahout.classifier.mlp;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.cli2.CommandLine;
+import org.apache.commons.cli2.Group;
+import org.apache.commons.cli2.Option;
+import org.apache.commons.cli2.builder.ArgumentBuilder;
+import org.apache.commons.cli2.builder.DefaultOptionBuilder;
+import org.apache.commons.cli2.builder.GroupBuilder;
+import org.apache.commons.cli2.commandline.Parser;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.mahout.math.Arrays;
+import org.apache.mahout.math.DenseVector;
+import org.apache.mahout.math.Vector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.io.Closeables;
+
+/** Train a {@link MultilayerPerceptron}. */
+public final class TrainMultilayerPerceptron {
+
+  private static final Logger log = LoggerFactory.getLogger(TrainMultilayerPerceptron.class);
+  
+  /**  The parameters used by MLP. */
+  static class Parameters {
+    double learningRate;
+    double momentumWeight;
+    double regularizationWeight;
+
+    String inputFilePath;
+    boolean skipHeader;
+    Map<String, Integer> labelsIndex = Maps.newHashMap();
+
+    String modelFilePath;
+    boolean updateModel;
+    List<Integer> layerSizeList = Lists.newArrayList();
+    String squashingFunctionName;
+  }
+
+  public static void main(String[] args) throws Exception {
+    Parameters parameters = new Parameters();
+    
+    if (parseArgs(args, parameters)) {
+      log.info("Validate model...");
+      // check whether the model already exists
+      Path modelPath = new Path(parameters.modelFilePath);
+      FileSystem modelFs = modelPath.getFileSystem(new Configuration());
+      MultilayerPerceptron mlp;
+
+      if (modelFs.exists(modelPath) && parameters.updateModel) {
+        // incrementally update existing model
+        log.info("Build model from existing model...");
+        mlp = new MultilayerPerceptron(parameters.modelFilePath);
+      } else {
+        if (modelFs.exists(modelPath)) {
+          modelFs.delete(modelPath, true); // delete the existing file
+        }
+        log.info("Build model from scratch...");
+        mlp = new MultilayerPerceptron();
+        for (int i = 0; i < parameters.layerSizeList.size(); ++i) {
+          boolean isFinalLayer = i == parameters.layerSizeList.size() - 1;
+          mlp.addLayer(parameters.layerSizeList.get(i), isFinalLayer, parameters.squashingFunctionName);
+        }
+        // The cost function only needs to be set once for the whole network
+        mlp.setCostFunction("Minus_Squared");
+        mlp.setModelPath(parameters.modelFilePath);
+      }
+
+      // set the parameters
+      mlp.setLearningRate(parameters.learningRate)
+         .setMomentumWeight(parameters.momentumWeight)
+         .setRegularizationWeight(parameters.regularizationWeight);
+
+      // train by the training data
+      Path trainingDataPath = new Path(parameters.inputFilePath);
+      FileSystem dataFs = trainingDataPath.getFileSystem(new Configuration());
+
+      Preconditions.checkArgument(dataFs.exists(trainingDataPath), "Training dataset %s cannot be found!",
+                                  parameters.inputFilePath);
+
+      log.info("Read data and train model...");
+      BufferedReader reader = null;
+
+      try {
+        reader = new BufferedReader(new InputStreamReader(dataFs.open(trainingDataPath)));
+        String line;
+
+        // read training data line by line
+        if (parameters.skipHeader) {
+          reader.readLine();
+        }
+
+        int labelDimension = parameters.labelsIndex.size();
+        while ((line = reader.readLine()) != null) {
+          String[] token = line.split(",");
+          String label = token[token.length - 1];
+          int labelIndex = parameters.labelsIndex.get(label);
+
+          double[] instances = new double[token.length - 1 + labelDimension];
+          for (int i = 0; i < token.length - 1; ++i) {
+            instances[i] = Double.parseDouble(token[i]);
+          }
+          for (int i = 0; i < labelDimension; ++i) {
+            instances[token.length - 1 + i] = 0;
+          }
+          // set the corresponding dimension
+          instances[token.length - 1 + labelIndex] = 1;
+
+          Vector instance = new DenseVector(instances);
+          mlp.trainOnline(instance);
+        }
+
+        // write model back
+        log.info("Write trained model to {}", parameters.modelFilePath);
+        mlp.writeModelToFile();
+        mlp.close();
+      } finally {
+        Closeables.close(reader, true);
+      }
+    }
+  }
+
+  /**
+   * Parse the input arguments.
+   * 
+   * @param args The input arguments
+   * @param parameters The parameters parsed.
+   * @return Whether the input arguments are valid.
+   * @throws Exception
+   */
+  private static boolean parseArgs(String[] args, Parameters parameters) throws Exception {
+    // build the options
+    log.info("Validate and parse arguments...");
+    DefaultOptionBuilder optionBuilder = new DefaultOptionBuilder();
+    GroupBuilder groupBuilder = new GroupBuilder();
+    ArgumentBuilder argumentBuilder = new ArgumentBuilder();
+
+    // whether skip the first row of the input file
+    Option skipHeaderOption = optionBuilder.withLongName("skipHeader")
+        .withShortName("sh").create();
+
+    Group skipHeaderGroup = groupBuilder.withOption(skipHeaderOption).create();
+
+    Option inputOption = optionBuilder
+        .withLongName("input")
+        .withShortName("i")
+        .withRequired(true)
+        .withChildren(skipHeaderGroup)
+        .withArgument(argumentBuilder.withName("path").withMinimum(1).withMaximum(1)
+                .create()).withDescription("the file path of training dataset")
+        .create();
+
+    Option labelsOption = optionBuilder
+        .withLongName("labels")
+        .withShortName("labels")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withName("label-name").withMinimum(2).create())
+        .withDescription("label names").create();
+
+    Option updateOption = optionBuilder
+        .withLongName("update")
+        .withShortName("u")
+        .withDescription("whether to incrementally update model if the model exists")
+        .create();
+
+    Group modelUpdateGroup = groupBuilder.withOption(updateOption).create();
+
+    Option modelOption = optionBuilder
+        .withLongName("model")
+        .withShortName("mo")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withName("model-path").withMinimum(1).withMaximum(1).create())
+        .withDescription("the path to store the trained model")
+        .withChildren(modelUpdateGroup).create();
+
+    Option layerSizeOption = optionBuilder
+        .withLongName("layerSize")
+        .withShortName("ls")
+        .withRequired(true)
+        .withArgument(argumentBuilder.withName("size of layer").withMinimum(2).withMaximum(5).create())
+        .withDescription("the size of each layer").create();
+
+    Option squashingFunctionOption = optionBuilder
+        .withLongName("squashingFunction")
+        .withShortName("sf")
+        .withArgument(argumentBuilder.withName("squashing function").withMinimum(1).withMaximum(1)
+            .withDefault("Sigmoid").create())
+        .withDescription("the name of squashing function (currently only supports Sigmoid)")
+        .create();
+
+    Option learningRateOption = optionBuilder
+        .withLongName("learningRate")
+        .withShortName("l")
+        .withArgument(argumentBuilder.withName("learning rate").withMaximum(1)
+            .withMinimum(1).withDefault(NeuralNetwork.DEFAULT_LEARNING_RATE).create())
+        .withDescription("learning rate").create();
+
+    Option momentumOption = optionBuilder
+        .withLongName("momentumWeight")
+        .withShortName("m")
+        .withArgument(argumentBuilder.withName("momentum weight").withMaximum(1)
+            .withMinimum(1).withDefault(NeuralNetwork.DEFAULT_MOMENTUM_WEIGHT).create())
+        .withDescription("momentum weight").create();
+
+    Option regularizationOption = optionBuilder
+        .withLongName("regularizationWeight")
+        .withShortName("r")
+        .withArgument(argumentBuilder.withName("regularization weight").withMaximum(1)
+            .withMinimum(1).withDefault(NeuralNetwork.DEFAULT_REGULARIZATION_WEIGHT).create())
+        .withDescription("regularization weight").create();
+
+    // parse the input
+    Parser parser = new Parser();
+    Group normalOptions = groupBuilder.withOption(inputOption)
+        .withOption(skipHeaderOption).withOption(updateOption)
+        .withOption(labelsOption).withOption(modelOption)
+        .withOption(layerSizeOption).withOption(squashingFunctionOption)
+        .withOption(learningRateOption).withOption(momentumOption)
+        .withOption(regularizationOption).create();
+
+    parser.setGroup(normalOptions);
+
+    CommandLine commandLine = parser.parseAndHelp(args);
+    if (commandLine == null) {
+      return false;
+    }
+
+    parameters.learningRate = getDouble(commandLine, learningRateOption);
+    parameters.momentumWeight = getDouble(commandLine, momentumOption);
+    parameters.regularizationWeight = getDouble(commandLine, regularizationOption);
+
+    parameters.inputFilePath = getString(commandLine, inputOption);
+    parameters.skipHeader = commandLine.hasOption(skipHeaderOption);
+
+    List<String> labelsList = getStringList(commandLine, labelsOption);
+    int currentIndex = 0;
+    for (String label : labelsList) {
+      parameters.labelsIndex.put(label, currentIndex++);
+    }
+
+    parameters.modelFilePath = getString(commandLine, modelOption);
+    parameters.updateModel = commandLine.hasOption(updateOption);
+
+    parameters.layerSizeList = getIntegerList(commandLine, layerSizeOption);
+
+    parameters.squashingFunctionName = getString(commandLine, squashingFunctionOption);
+
+    System.out.printf("Input: %s, Model: %s, Update: %s, Layer size: %s, Squashing function: %s, Learning rate: %f," +
+        " Momentum weight: %f, Regularization weight: %f\n", parameters.inputFilePath, parameters.modelFilePath,
+        parameters.updateModel, Arrays.toString(parameters.layerSizeList.toArray()),
+        parameters.squashingFunctionName, parameters.learningRate, parameters.momentumWeight,
+        parameters.regularizationWeight);
+
+    return true;
+  }
+
+  static Double getDouble(CommandLine commandLine, Option option) {
+    Object val = commandLine.getValue(option);
+    if (val != null) {
+      return Double.parseDouble(val.toString());
+    }
+    return null;
+  }
+
+  static String getString(CommandLine commandLine, Option option) {
+    Object val = commandLine.getValue(option);
+    if (val != null) {
+      return val.toString();
+    }
+    return null;
+  }
+
+  static List<Integer> getIntegerList(CommandLine commandLine, Option option) {
+    List<String> list = commandLine.getValues(option);
+    List<Integer> valList = Lists.newArrayList();
+    for (String str : list) {
+      valList.add(Integer.parseInt(str));
+    }
+    return valList;
+  }
+
+  static List<String> getStringList(CommandLine commandLine, Option option) {
+    return commandLine.getValues(option);
+  }
+
+}
\ No newline at end of file

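A matching training invocation, again with hypothetical paths, could look like
this; note that the first layer size must equal the feature dimension (4 for
the Iris data used in the tests) and the last one the number of labels:

    bin/mahout org.apache.mahout.classifier.mlp.TrainMultilayerPerceptron \
        --input /path/to/iris.csv --skipHeader \
        --labels setosa versicolor virginica \
        --model /path/to/mlp.model \
        --layerSize 4 8 3 \
        --squashingFunction Sigmoid \
        --learningRate 0.5 --momentumWeight 0.1 --regularizationWeight 0

The last CSV column is treated as the label; each line is converted into a
feature vector plus a one-hot label encoding and fed to trainOnline.
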
Added: mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/Datasets.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/Datasets.java?rev=1595684&view=auto
==============================================================================
--- mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/Datasets.java (added)
+++ mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/Datasets.java Sun May 18 21:03:02 2014
@@ -0,0 +1,866 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mahout.classifier.mlp;
+
+public class Datasets {
+
+  public static final String[] IRIS = new String[] {
+      "Sepal.Length,Sepal.Width,Petal.Length,Petal.Width,Species",
+      "5.1,3.5,1.4,0.2,setosa",
+      "4.9,3.0,1.4,0.2,setosa",
+      "4.7,3.2,1.3,0.2,setosa",
+      "4.6,3.1,1.5,0.2,setosa",
+      "5.0,3.6,1.4,0.2,setosa",
+      "5.4,3.9,1.7,0.4,setosa",
+      "4.6,3.4,1.4,0.3,setosa",
+      "5.0,3.4,1.5,0.2,setosa",
+      "4.4,2.9,1.4,0.2,setosa",
+      "4.9,3.1,1.5,0.1,setosa",
+      "5.4,3.7,1.5,0.2,setosa",
+      "4.8,3.4,1.6,0.2,setosa",
+      "4.8,3.0,1.4,0.1,setosa",
+      "4.3,3.0,1.1,0.1,setosa",
+      "5.8,4.0,1.2,0.2,setosa",
+      "5.7,4.4,1.5,0.4,setosa",
+      "5.4,3.9,1.3,0.4,setosa",
+      "5.1,3.5,1.4,0.3,setosa",
+      "5.7,3.8,1.7,0.3,setosa",
+      "5.1,3.8,1.5,0.3,setosa",
+      "5.4,3.4,1.7,0.2,setosa",
+      "5.1,3.7,1.5,0.4,setosa",
+      "4.6,3.6,1.0,0.2,setosa",
+      "5.1,3.3,1.7,0.5,setosa",
+      "4.8,3.4,1.9,0.2,setosa",
+      "5.0,3.0,1.6,0.2,setosa",
+      "5.0,3.4,1.6,0.4,setosa",
+      "5.2,3.5,1.5,0.2,setosa",
+      "5.2,3.4,1.4,0.2,setosa",
+      "4.7,3.2,1.6,0.2,setosa",
+      "4.8,3.1,1.6,0.2,setosa",
+      "5.4,3.4,1.5,0.4,setosa",
+      "5.2,4.1,1.5,0.1,setosa",
+      "5.5,4.2,1.4,0.2,setosa",
+      "4.9,3.1,1.5,0.2,setosa",
+      "5.0,3.2,1.2,0.2,setosa",
+      "5.5,3.5,1.3,0.2,setosa",
+      "4.9,3.6,1.4,0.1,setosa",
+      "4.4,3.0,1.3,0.2,setosa",
+      "5.1,3.4,1.5,0.2,setosa",
+      "5.0,3.5,1.3,0.3,setosa",
+      "4.5,2.3,1.3,0.3,setosa",
+      "4.4,3.2,1.3,0.2,setosa",
+      "5.0,3.5,1.6,0.6,setosa",
+      "5.1,3.8,1.9,0.4,setosa",
+      "4.8,3.0,1.4,0.3,setosa",
+      "5.1,3.8,1.6,0.2,setosa",
+      "4.6,3.2,1.4,0.2,setosa",
+      "5.3,3.7,1.5,0.2,setosa",
+      "5.0,3.3,1.4,0.2,setosa",
+      "7.0,3.2,4.7,1.4,versicolor",
+      "6.4,3.2,4.5,1.5,versicolor",
+      "6.9,3.1,4.9,1.5,versicolor",
+      "5.5,2.3,4.0,1.3,versicolor",
+      "6.5,2.8,4.6,1.5,versicolor",
+      "5.7,2.8,4.5,1.3,versicolor",
+      "6.3,3.3,4.7,1.6,versicolor",
+      "4.9,2.4,3.3,1.0,versicolor",
+      "6.6,2.9,4.6,1.3,versicolor",
+      "5.2,2.7,3.9,1.4,versicolor",
+      "5.0,2.0,3.5,1.0,versicolor",
+      "5.9,3.0,4.2,1.5,versicolor",
+      "6.0,2.2,4.0,1.0,versicolor",
+      "6.1,2.9,4.7,1.4,versicolor",
+      "5.6,2.9,3.6,1.3,versicolor",
+      "6.7,3.1,4.4,1.4,versicolor",
+      "5.6,3.0,4.5,1.5,versicolor",
+      "5.8,2.7,4.1,1.0,versicolor",
+      "6.2,2.2,4.5,1.5,versicolor",
+      "5.6,2.5,3.9,1.1,versicolor",
+      "5.9,3.2,4.8,1.8,versicolor",
+      "6.1,2.8,4.0,1.3,versicolor",
+      "6.3,2.5,4.9,1.5,versicolor",
+      "6.1,2.8,4.7,1.2,versicolor",
+      "6.4,2.9,4.3,1.3,versicolor",
+      "6.6,3.0,4.4,1.4,versicolor",
+      "6.8,2.8,4.8,1.4,versicolor",
+      "6.7,3.0,5.0,1.7,versicolor",
+      "6.0,2.9,4.5,1.5,versicolor",
+      "5.7,2.6,3.5,1.0,versicolor",
+      "5.5,2.4,3.8,1.1,versicolor",
+      "5.5,2.4,3.7,1.0,versicolor",
+      "5.8,2.7,3.9,1.2,versicolor",
+      "6.0,2.7,5.1,1.6,versicolor",
+      "5.4,3.0,4.5,1.5,versicolor",
+      "6.0,3.4,4.5,1.6,versicolor",
+      "6.7,3.1,4.7,1.5,versicolor",
+      "6.3,2.3,4.4,1.3,versicolor",
+      "5.6,3.0,4.1,1.3,versicolor",
+      "5.5,2.5,4.0,1.3,versicolor",
+      "5.5,2.6,4.4,1.2,versicolor",
+      "6.1,3.0,4.6,1.4,versicolor",
+      "5.8,2.6,4.0,1.2,versicolor",
+      "5.0,2.3,3.3,1.0,versicolor",
+      "5.6,2.7,4.2,1.3,versicolor",
+      "5.7,3.0,4.2,1.2,versicolor",
+      "5.7,2.9,4.2,1.3,versicolor",
+      "6.2,2.9,4.3,1.3,versicolor",
+      "5.1,2.5,3.0,1.1,versicolor",
+      "5.7,2.8,4.1,1.3,versicolor",
+      "6.3,3.3,6.0,2.5,virginica",
+      "5.8,2.7,5.1,1.9,virginica",
+      "7.1,3.0,5.9,2.1,virginica",
+      "6.3,2.9,5.6,1.8,virginica",
+      "6.5,3.0,5.8,2.2,virginica",
+      "7.6,3.0,6.6,2.1,virginica",
+      "4.9,2.5,4.5,1.7,virginica",
+      "7.3,2.9,6.3,1.8,virginica",
+      "6.7,2.5,5.8,1.8,virginica",
+      "7.2,3.6,6.1,2.5,virginica",
+      "6.5,3.2,5.1,2.0,virginica",
+      "6.4,2.7,5.3,1.9,virginica",
+      "6.8,3.0,5.5,2.1,virginica",
+      "5.7,2.5,5.0,2.0,virginica",
+      "5.8,2.8,5.1,2.4,virginica",
+      "6.4,3.2,5.3,2.3,virginica",
+      "6.5,3.0,5.5,1.8,virginica",
+      "7.7,3.8,6.7,2.2,virginica",
+      "7.7,2.6,6.9,2.3,virginica",
+      "6.0,2.2,5.0,1.5,virginica",
+      "6.9,3.2,5.7,2.3,virginica",
+      "5.6,2.8,4.9,2.0,virginica",
+      "7.7,2.8,6.7,2.0,virginica",
+      "6.3,2.7,4.9,1.8,virginica",
+      "6.7,3.3,5.7,2.1,virginica",
+      "7.2,3.2,6.0,1.8,virginica",
+      "6.2,2.8,4.8,1.8,virginica",
+      "6.1,3.0,4.9,1.8,virginica",
+      "6.4,2.8,5.6,2.1,virginica",
+      "7.2,3.0,5.8,1.6,virginica",
+      "7.4,2.8,6.1,1.9,virginica",
+      "7.9,3.8,6.4,2.0,virginica",
+      "6.4,2.8,5.6,2.2,virginica",
+      "6.3,2.8,5.1,1.5,virginica",
+      "6.1,2.6,5.6,1.4,virginica",
+      "7.7,3.0,6.1,2.3,virginica",
+      "6.3,3.4,5.6,2.4,virginica",
+      "6.4,3.1,5.5,1.8,virginica",
+      "6.0,3.0,4.8,1.8,virginica",
+      "6.9,3.1,5.4,2.1,virginica",
+      "6.7,3.1,5.6,2.4,virginica",
+      "6.9,3.1,5.1,2.3,virginica",
+      "5.8,2.7,5.1,1.9,virginica",
+      "6.8,3.2,5.9,2.3,virginica",
+      "6.7,3.3,5.7,2.5,virginica",
+      "6.7,3.0,5.2,2.3,virginica",
+      "6.3,2.5,5.0,1.9,virginica",
+      "6.5,3.0,5.2,2.0,virginica",
+      "6.2,3.4,5.4,2.3,virginica",
+      "5.9,3.0,5.1,1.8,virginica"
+  };
+
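+  /** Cancer dataset: nine integer features (V1-V9) and a binary (0/1) target. */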
+  public static final String[] CANCER = new String[] {
+      "\"V1\",\"V2\",\"V3\",\"V4\",\"V5\",\"V6\",\"V7\",\"V8\",\"V9\",\"target\"",
+      "5,1,1,1,2,1,3,1,1,0",
+      "5,4,4,5,7,10,3,2,1,0",
+      "3,1,1,1,2,2,3,1,1,0",
+      "6,8,8,1,3,4,3,7,1,0",
+      "4,1,1,3,2,1,3,1,1,0",
+      "8,10,10,8,7,10,9,7,1,1",
+      "1,1,1,1,2,10,3,1,1,0",
+      "2,1,2,1,2,1,3,1,1,0",
+      "2,1,1,1,2,1,1,1,5,0",
+      "4,2,1,1,2,1,2,1,1,0",
+      "1,1,1,1,1,1,3,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "5,3,3,3,2,3,4,4,1,1",
+      "1,1,1,1,2,3,3,1,1,0",
+      "8,7,5,10,7,9,5,5,4,1",
+      "7,4,6,4,6,1,4,3,1,1",
+      "4,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "10,7,7,6,4,10,4,1,2,1",
+      "6,1,1,1,2,1,3,1,1,0",
+      "7,3,2,10,5,10,5,4,4,1",
+      "10,5,5,3,6,7,7,10,1,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "5,2,3,4,2,7,3,6,1,1",
+      "3,2,1,1,1,1,2,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "1,1,3,1,2,1,1,1,1,0",
+      "3,1,1,1,1,1,2,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "10,7,7,3,8,5,7,4,3,1",
+      "2,1,1,2,2,1,3,1,1,0",
+      "3,1,2,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "10,10,10,8,6,1,8,9,1,1",
+      "6,2,1,1,1,1,7,1,1,0",
+      "5,4,4,9,2,10,5,6,1,1",
+      "2,5,3,3,6,7,7,5,1,1",
+      "10,4,3,1,3,3,6,5,2,1",
+      "6,10,10,2,8,10,7,3,3,1",
+      "5,6,5,6,10,1,3,1,1,1",
+      "10,10,10,4,8,1,8,10,1,1",
+      "1,1,1,1,2,1,2,1,2,0",
+      "3,7,7,4,4,9,4,8,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "4,1,1,3,2,1,3,1,1,0",
+      "7,8,7,2,4,8,3,8,2,1",
+      "9,5,8,1,2,3,2,1,5,1",
+      "5,3,3,4,2,4,3,4,1,1",
+      "10,3,6,2,3,5,4,10,2,1",
+      "5,5,5,8,10,8,7,3,7,1",
+      "10,5,5,6,8,8,7,1,1,1",
+      "10,6,6,3,4,5,3,6,1,1",
+      "8,10,10,1,3,6,3,9,1,1",
+      "8,2,4,1,5,1,5,4,4,1",
+      "5,2,3,1,6,10,5,1,1,1",
+      "9,5,5,2,2,2,5,1,1,1",
+      "5,3,5,5,3,3,4,10,1,1",
+      "1,1,1,1,2,2,2,1,1,0",
+      "9,10,10,1,10,8,3,3,1,1",
+      "6,3,4,1,5,2,3,9,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "10,4,2,1,3,2,4,3,10,1",
+      "4,1,1,1,2,1,3,1,1,0",
+      "5,3,4,1,8,10,4,9,1,1",
+      "8,3,8,3,4,9,8,9,8,1",
+      "1,1,1,1,2,1,3,2,1,0",
+      "5,1,3,1,2,1,2,1,1,0",
+      "6,10,2,8,10,2,7,8,10,1",
+      "1,3,3,2,2,1,7,2,1,0",
+      "9,4,5,10,6,10,4,8,1,1",
+      "10,6,4,1,3,4,3,2,3,1",
+      "1,1,2,1,2,2,4,2,1,0",
+      "1,1,4,1,2,1,2,1,1,0",
+      "5,3,1,2,2,1,2,1,1,0",
+      "3,1,1,1,2,3,3,1,1,0",
+      "2,1,1,1,3,1,2,1,1,0",
+      "2,2,2,1,1,1,7,1,1,0",
+      "4,1,1,2,2,1,2,1,1,0",
+      "5,2,1,1,2,1,3,1,1,0",
+      "3,1,1,1,2,2,7,1,1,0",
+      "3,5,7,8,8,9,7,10,7,1",
+      "5,10,6,1,10,4,4,10,10,1",
+      "3,3,6,4,5,8,4,4,1,1",
+      "3,6,6,6,5,10,6,8,3,1",
+      "4,1,1,1,2,1,3,1,1,0",
+      "2,1,1,2,3,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "3,1,1,2,2,1,1,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "2,1,1,2,2,1,1,1,1,0",
+      "5,1,1,1,2,1,3,1,1,0",
+      "9,6,9,2,10,6,2,9,10,1",
+      "7,5,6,10,5,10,7,9,4,1",
+      "10,3,5,1,10,5,3,10,2,1",
+      "2,3,4,4,2,5,2,5,1,1",
+      "4,1,2,1,2,1,3,1,1,0",
+      "8,2,3,1,6,3,7,1,1,1",
+      "10,10,10,10,10,1,8,8,8,1",
+      "7,3,4,4,3,3,3,2,7,1",
+      "10,10,10,8,2,10,4,1,1,1",
+      "1,6,8,10,8,10,5,7,1,1",
+      "1,1,1,1,2,1,2,3,1,0",
+      "6,5,4,4,3,9,7,8,3,1",
+      "1,3,1,2,2,2,5,3,2,0",
+      "8,6,4,3,5,9,3,1,1,1",
+      "10,3,3,10,2,10,7,3,3,1",
+      "10,10,10,3,10,8,8,1,1,1",
+      "3,3,2,1,2,3,3,1,1,0",
+      "1,1,1,1,2,5,1,1,1,0",
+      "8,3,3,1,2,2,3,2,1,0",
+      "4,5,5,10,4,10,7,5,8,1",
+      "1,1,1,1,4,3,1,1,1,0",
+      "3,2,1,1,2,2,3,1,1,0",
+      "1,1,2,2,2,1,3,1,1,0",
+      "4,2,1,1,2,2,3,1,1,0",
+      "10,10,10,2,10,10,5,3,3,1",
+      "5,3,5,1,8,10,5,3,1,1",
+      "5,4,6,7,9,7,8,10,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "7,5,3,7,4,10,7,5,5,1",
+      "3,1,1,1,2,1,3,1,1,0",
+      "8,3,5,4,5,10,1,6,2,1",
+      "1,1,1,1,10,1,1,1,1,0",
+      "5,1,3,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "5,10,8,10,8,10,3,6,3,1",
+      "3,1,1,1,2,1,2,2,1,0",
+      "3,1,1,1,3,1,2,1,1,0",
+      "5,1,1,1,2,2,3,3,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "4,1,2,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "9,5,5,4,4,5,4,3,3,1",
+      "1,1,1,1,2,5,1,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "3,4,5,2,6,8,4,1,1,1",
+      "1,1,1,1,3,2,2,1,1,0",
+      "3,1,1,3,8,1,5,8,1,0",
+      "8,8,7,4,10,10,7,8,7,1",
+      "1,1,1,1,1,1,3,1,1,0",
+      "7,2,4,1,6,10,5,4,3,1",
+      "10,10,8,6,4,5,8,10,1,1",
+      "4,1,1,1,2,3,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,5,5,6,3,10,3,1,1,1",
+      "1,2,2,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "9,9,10,3,6,10,7,10,6,1",
+      "10,7,7,4,5,10,5,7,2,1",
+      "4,1,1,1,2,1,3,2,1,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "1,1,1,2,1,3,1,1,7,0",
+      "4,1,1,1,2,2,3,2,1,0",
+      "5,6,7,8,8,10,3,10,3,1",
+      "10,8,10,10,6,1,3,1,10,1",
+      "3,1,1,1,2,1,3,1,1,0",
+      "1,1,1,2,1,1,1,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "6,10,10,10,8,10,10,10,7,1",
+      "8,6,5,4,3,10,6,1,1,1",
+      "5,8,7,7,10,10,5,7,1,1",
+      "2,1,1,1,2,1,3,1,1,0",
+      "5,10,10,3,8,1,5,10,3,1",
+      "4,1,1,1,2,1,3,1,1,0",
+      "5,3,3,3,6,10,3,1,1,1",
+      "1,1,1,1,1,1,3,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "6,1,1,1,2,1,3,1,1,0",
+      "5,8,8,8,5,10,7,8,1,1",
+      "8,7,6,4,4,10,5,1,1,1",
+      "2,1,1,1,1,1,3,1,1,0",
+      "1,5,8,6,5,8,7,10,1,1",
+      "10,5,6,10,6,10,7,7,10,1",
+      "5,8,4,10,5,8,9,10,1,1",
+      "1,2,3,1,2,1,3,1,1,0",
+      "10,10,10,8,6,8,7,10,1,1",
+      "7,5,10,10,10,10,4,10,3,1",
+      "5,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "8,4,4,5,4,7,7,8,2,0",
+      "5,1,1,4,2,1,3,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "9,7,7,5,5,10,7,8,3,1",
+      "10,8,8,4,10,10,8,1,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "5,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "5,10,10,9,6,10,7,10,5,1",
+      "10,10,9,3,7,5,3,5,1,1",
+      "1,1,1,1,1,1,3,1,1,0",
+      "1,1,1,1,1,1,3,1,1,0",
+      "5,1,1,1,1,1,3,1,1,0",
+      "8,10,10,10,5,10,8,10,6,1",
+      "8,10,8,8,4,8,7,7,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "10,10,10,10,7,10,7,10,4,1",
+      "10,10,10,10,3,10,10,6,1,1",
+      "8,7,8,7,5,5,5,10,2,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "6,10,7,7,6,4,8,10,2,1",
+      "6,1,3,1,2,1,3,1,1,0",
+      "1,1,1,2,2,1,3,1,1,0",
+      "10,6,4,3,10,10,9,10,1,1",
+      "4,1,1,3,1,5,2,1,1,1",
+      "7,5,6,3,3,8,7,4,1,1",
+      "10,5,5,6,3,10,7,9,2,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "10,5,7,4,4,10,8,9,1,1",
+      "8,9,9,5,3,5,7,7,1,1",
+      "1,1,1,1,1,1,3,1,1,0",
+      "10,10,10,3,10,10,9,10,1,1",
+      "7,4,7,4,3,7,7,6,1,1",
+      "6,8,7,5,6,8,8,9,2,1",
+      "8,4,6,3,3,1,4,3,1,0",
+      "10,4,5,5,5,10,4,1,1,1",
+      "3,3,2,1,3,1,3,6,1,0",
+      "10,8,8,2,8,10,4,8,10,1",
+      "9,8,8,5,6,2,4,10,4,1",
+      "8,10,10,8,6,9,3,10,10,1",
+      "10,4,3,2,3,10,5,3,2,1",
+      "5,1,3,3,2,2,2,3,1,0",
+      "3,1,1,3,1,1,3,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,5,5,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "5,1,1,2,2,2,3,1,1,0",
+      "8,10,10,8,5,10,7,8,1,1",
+      "8,4,4,1,2,9,3,3,1,1",
+      "4,1,1,1,2,1,3,6,1,0",
+      "1,2,2,1,2,1,1,1,1,0",
+      "10,4,4,10,2,10,5,3,3,1",
+      "6,3,3,5,3,10,3,5,3,0",
+      "6,10,10,2,8,10,7,3,3,1",
+      "9,10,10,1,10,8,3,3,1,1",
+      "5,6,6,2,4,10,3,6,1,1",
+      "3,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "5,7,7,1,5,8,3,4,1,0",
+      "10,5,8,10,3,10,5,1,3,1",
+      "5,10,10,6,10,10,10,6,5,1",
+      "8,8,9,4,5,10,7,8,1,1",
+      "10,4,4,10,6,10,5,5,1,1",
+      "7,9,4,10,10,3,5,3,3,1",
+      "5,1,4,1,2,1,3,2,1,0",
+      "10,10,6,3,3,10,4,3,2,1",
+      "3,3,5,2,3,10,7,1,1,1",
+      "10,8,8,2,3,4,8,7,8,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "8,4,7,1,3,10,3,9,2,1",
+      "5,1,1,1,2,1,3,1,1,0",
+      "3,3,5,2,3,10,7,1,1,1",
+      "7,2,4,1,3,4,3,3,1,1",
+      "3,1,1,1,2,1,3,2,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "10,5,7,3,3,7,3,3,8,1",
+      "3,1,1,1,2,1,3,1,1,0",
+      "2,1,1,2,2,1,3,1,1,0",
+      "1,4,3,10,4,10,5,6,1,1",
+      "10,4,6,1,2,10,5,3,1,1",
+      "7,4,5,10,2,10,3,8,2,1",
+      "8,10,10,10,8,10,10,7,3,1",
+      "10,10,10,10,10,10,4,10,10,1",
+      "3,1,1,1,3,1,2,1,1,0",
+      "6,1,3,1,4,5,5,10,1,1",
+      "5,6,6,8,6,10,4,10,4,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "10,4,4,6,2,10,2,3,1,1",
+      "5,5,7,8,6,10,7,4,1,1",
+      "5,3,4,3,4,5,4,7,1,0",
+      "8,2,1,1,5,1,1,1,1,0",
+      "9,1,2,6,4,10,7,7,2,1",
+      "8,4,10,5,4,4,7,10,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "10,10,10,7,9,10,7,10,10,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "8,3,4,9,3,10,3,3,1,1",
+      "10,8,4,4,4,10,3,10,4,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "7,8,7,6,4,3,8,8,4,1",
+      "3,1,1,1,2,5,5,1,1,0",
+      "2,1,1,1,3,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "8,6,4,10,10,1,3,5,1,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,1,1,2,1,1,0",
+      "5,5,5,2,5,10,4,3,1,1",
+      "6,8,7,8,6,8,8,9,1,1",
+      "1,1,1,1,5,1,3,1,1,0",
+      "4,4,4,4,6,5,7,3,1,0",
+      "7,6,3,2,5,10,7,4,6,1",
+      "3,1,1,1,2,1,3,1,1,0",
+      "5,4,6,10,2,10,4,1,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "3,2,2,1,2,1,2,3,1,0",
+      "10,1,1,1,2,10,5,4,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "8,10,3,2,6,4,3,10,1,1",
+      "10,4,6,4,5,10,7,1,1,1",
+      "10,4,7,2,2,8,6,1,1,1",
+      "5,1,1,1,2,1,3,1,2,0",
+      "5,2,2,2,2,1,2,2,1,0",
+      "5,4,6,6,4,10,4,3,1,1",
+      "8,6,7,3,3,10,3,4,2,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "6,5,5,8,4,10,3,4,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,1,1,2,1,1,0",
+      "8,5,5,5,2,10,4,3,1,1",
+      "10,3,3,1,2,10,7,6,1,1",
+      "1,1,1,1,2,1,3,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "7,6,4,8,10,10,9,5,3,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,2,2,2,3,1,1,3,1,0",
+      "1,1,1,1,1,1,1,3,1,0",
+      "3,4,4,10,5,1,3,3,1,1",
+      "4,2,3,5,3,8,7,6,1,1",
+      "5,1,1,3,2,1,1,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "3,4,5,3,7,3,4,6,1,0",
+      "2,7,10,10,7,10,4,9,4,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,3,1,2,2,1,0",
+      "5,3,3,1,3,3,3,3,3,1",
+      "8,10,10,7,10,10,7,3,8,1",
+      "8,10,5,3,8,4,4,10,3,1",
+      "10,3,5,4,3,7,3,5,3,1",
+      "6,10,10,10,10,10,8,10,10,1",
+      "3,10,3,10,6,10,5,1,4,1",
+      "3,2,2,1,4,3,2,1,1,0",
+      "4,4,4,2,2,3,2,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "6,10,10,10,8,10,7,10,7,1",
+      "5,8,8,10,5,10,8,10,3,1",
+      "1,1,3,1,2,1,1,1,1,0",
+      "1,1,3,1,1,1,2,1,1,0",
+      "4,3,2,1,3,1,2,1,1,0",
+      "1,1,3,1,2,1,1,1,1,0",
+      "4,1,2,1,2,1,2,1,1,0",
+      "5,1,1,2,2,1,2,1,1,0",
+      "3,1,2,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,1,1,2,1,1,0",
+      "3,1,1,4,3,1,2,2,1,0",
+      "5,3,4,1,4,1,3,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "10,6,3,6,4,10,7,8,4,1",
+      "3,2,2,2,2,1,3,2,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "3,3,2,2,3,1,1,2,3,0",
+      "7,6,6,3,2,10,7,1,1,1",
+      "5,3,3,2,3,1,3,1,1,0",
+      "2,1,1,1,2,1,2,2,1,0",
+      "5,1,1,1,3,2,2,2,1,0",
+      "1,1,1,2,2,1,2,1,1,0",
+      "10,8,7,4,3,10,7,9,1,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,1,1,1,1,1,0",
+      "1,2,3,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "3,2,1,1,2,1,2,2,1,0",
+      "1,2,3,1,2,1,1,1,1,0",
+      "3,10,8,7,6,9,9,3,8,1",
+      "3,1,1,1,2,1,1,1,1,0",
+      "5,3,3,1,2,1,2,1,1,0",
+      "3,1,1,1,2,4,1,1,1,0",
+      "1,2,1,3,2,1,1,2,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "4,2,2,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "2,3,2,2,2,2,3,1,1,0",
+      "3,1,2,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "10,10,10,6,8,4,8,5,1,1",
+      "5,1,2,1,2,1,3,1,1,0",
+      "8,5,6,2,3,10,6,6,1,1",
+      "3,3,2,6,3,3,3,5,1,0",
+      "8,7,8,5,10,10,7,2,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "5,2,2,2,2,2,3,2,2,0",
+      "2,3,1,1,5,1,1,1,1,0",
+      "3,2,2,3,2,3,3,1,1,0",
+      "10,10,10,7,10,10,8,2,1,1",
+      "4,3,3,1,2,1,3,3,1,0",
+      "5,1,3,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "9,10,10,10,10,10,10,10,1,1",
+      "5,3,6,1,2,1,1,1,1,0",
+      "8,7,8,2,4,2,5,10,1,1",
+      "1,1,1,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "1,3,1,1,2,1,2,2,1,0",
+      "5,1,1,3,4,1,3,2,1,0",
+      "5,1,1,1,2,1,2,2,1,0",
+      "3,2,2,3,2,1,1,1,1,0",
+      "6,9,7,5,5,8,4,2,1,0",
+      "10,8,10,1,3,10,5,1,1,1",
+      "10,10,10,1,6,1,2,8,1,1",
+      "4,1,1,1,2,1,1,1,1,0",
+      "4,1,3,3,2,1,1,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "10,4,3,10,4,10,10,1,1,1",
+      "5,2,2,4,2,4,1,1,1,0",
+      "1,1,1,3,2,3,1,1,1,0",
+      "1,1,1,1,2,2,1,1,1,0",
+      "5,1,1,6,3,1,2,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,1,1,1,1,1,0",
+      "5,7,9,8,6,10,8,10,1,1",
+      "4,1,1,3,1,1,2,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "3,1,1,3,2,1,1,1,1,0",
+      "4,5,5,8,6,10,10,7,1,1",
+      "2,3,1,1,3,1,1,1,1,0",
+      "10,2,2,1,2,6,1,1,2,1",
+      "10,6,5,8,5,10,8,6,1,1",
+      "8,8,9,6,6,3,10,10,1,1",
+      "5,1,2,1,2,1,1,1,1,0",
+      "5,1,3,1,2,1,1,1,1,0",
+      "5,1,1,3,2,1,1,1,1,0",
+      "3,1,1,1,2,5,1,1,1,0",
+      "6,1,1,3,2,1,1,1,1,0",
+      "4,1,1,1,2,1,1,2,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "10,9,8,7,6,4,7,10,3,1",
+      "10,6,6,2,4,10,9,7,1,1",
+      "6,6,6,5,4,10,7,6,2,1",
+      "4,1,1,1,2,1,1,1,1,0",
+      "1,1,2,1,2,1,2,1,1,0",
+      "3,1,1,1,1,1,2,1,1,0",
+      "6,1,1,3,2,1,1,1,1,0",
+      "6,1,1,1,1,1,1,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "4,1,2,1,2,1,1,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "5,2,1,1,2,1,1,1,1,0",
+      "4,8,7,10,4,10,7,5,1,1",
+      "5,1,1,1,1,1,1,1,1,0",
+      "5,3,2,4,2,1,1,1,1,0",
+      "9,10,10,10,10,5,10,10,10,1",
+      "8,7,8,5,5,10,9,10,1,1",
+      "5,1,2,1,2,1,1,1,1,0",
+      "1,1,1,3,1,3,1,1,1,0",
+      "3,1,1,1,1,1,2,1,1,0",
+      "10,10,10,10,6,10,8,1,5,1",
+      "3,6,4,10,3,3,3,4,1,1",
+      "6,3,2,1,3,4,4,1,1,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,8,9,4,3,10,7,1,1,1",
+      "4,1,1,1,1,1,2,1,1,0",
+      "5,10,10,10,6,10,6,5,2,1",
+      "5,1,2,10,4,5,2,1,1,0",
+      "3,1,1,1,1,1,2,1,1,0",
+      "1,1,1,1,1,1,1,1,1,0",
+      "4,2,1,1,2,1,1,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "6,1,1,1,2,1,3,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "4,1,1,2,2,1,2,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "3,3,1,1,2,1,1,1,1,0",
+      "8,10,10,10,7,5,4,8,7,1",
+      "1,1,1,1,2,4,1,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,1,1,2,1,1,0",
+      "6,6,7,10,3,10,8,10,2,1",
+      "4,10,4,7,3,10,9,10,1,1",
+      "1,1,1,1,1,1,1,1,1,0",
+      "1,1,1,1,1,1,2,1,1,0",
+      "3,1,2,2,2,1,1,1,1,0",
+      "4,7,8,3,4,10,9,1,1,1",
+      "1,1,1,1,3,1,1,1,1,0",
+      "4,1,1,1,3,1,1,1,1,0",
+      "10,4,5,4,3,5,7,3,1,1",
+      "7,5,6,10,4,10,5,3,1,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "3,1,1,2,2,1,1,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "6,1,3,2,2,1,1,1,1,0",
+      "4,1,1,1,1,1,2,1,1,0",
+      "7,4,4,3,4,10,6,9,1,1",
+      "4,2,2,1,2,1,2,1,1,0",
+      "1,1,1,1,1,1,3,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "1,1,3,2,2,1,3,1,1,0",
+      "5,1,1,1,2,1,3,1,1,0",
+      "5,1,2,1,2,1,3,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "6,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,2,2,1,1,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "5,3,1,1,2,1,1,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "2,1,3,2,2,1,2,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "6,10,10,10,4,10,7,10,1,1",
+      "2,1,1,1,1,1,1,1,1,0",
+      "3,1,1,1,1,1,1,1,1,0",
+      "7,8,3,7,4,5,7,8,2,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "3,2,2,2,2,1,4,2,1,0",
+      "4,4,2,1,2,5,2,1,2,0",
+      "3,1,1,1,2,1,1,1,1,0",
+      "4,3,1,1,2,1,4,8,1,0",
+      "5,2,2,2,1,1,2,1,1,0",
+      "5,1,1,3,2,1,1,1,1,0",
+      "2,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,3,1,1,0",
+      "5,1,1,1,2,1,3,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,2,1,3,2,1,0",
+      "5,7,10,10,5,10,10,10,1,1",
+      "3,1,2,1,2,1,3,1,1,0",
+      "4,1,1,1,2,3,2,1,1,0",
+      "8,4,4,1,6,10,2,5,2,1",
+      "10,10,8,10,6,5,10,3,1,1",
+      "8,10,4,4,8,10,8,2,1,1",
+      "7,6,10,5,3,10,9,10,2,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "10,9,7,3,4,2,7,7,1,1",
+      "5,1,2,1,2,1,3,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,3,1,1,0",
+      "5,1,2,1,2,1,2,1,1,0",
+      "5,7,10,6,5,10,7,5,1,1",
+      "6,10,5,5,4,10,6,10,1,1",
+      "3,1,1,1,2,1,1,1,1,0",
+      "5,1,1,6,3,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "8,10,10,10,6,10,10,10,1,1",
+      "5,1,1,1,2,1,2,2,1,0",
+      "9,8,8,9,6,3,4,1,1,1",
+      "5,1,1,1,2,1,1,1,1,0",
+      "4,10,8,5,4,1,10,1,1,1",
+      "2,5,7,6,4,10,7,6,1,1",
+      "10,3,4,5,3,10,4,1,1,1",
+      "5,1,2,1,2,1,1,1,1,0",
+      "4,8,6,3,4,10,7,1,1,1",
+      "5,1,1,1,2,1,2,1,1,0",
+      "4,1,2,1,2,1,2,1,1,0",
+      "5,1,3,1,2,1,3,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "5,2,4,1,1,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,1,1,2,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "5,4,6,8,4,1,8,10,1,1",
+      "5,3,2,8,5,10,8,1,2,1",
+      "10,5,10,3,5,8,7,8,3,1",
+      "4,1,1,2,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,10,10,10,10,10,10,1,1,1",
+      "5,1,1,1,2,1,1,1,1,0",
+      "10,4,3,10,3,10,7,1,2,1",
+      "5,10,10,10,5,2,8,5,1,1",
+      "8,10,10,10,6,10,10,10,10,1",
+      "2,3,1,1,2,1,2,1,1,0",
+      "2,1,1,1,1,1,2,1,1,0",
+      "4,1,3,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "6,3,3,3,3,2,6,1,1,0",
+      "7,1,2,3,2,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,1,1,2,1,1,2,1,1,0",
+      "3,1,3,1,3,4,1,1,1,0",
+      "4,6,6,5,7,6,7,7,3,1",
+      "2,1,1,1,2,5,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "6,2,3,1,2,1,1,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "8,7,4,4,5,3,5,10,1,1",
+      "3,1,1,1,2,1,1,1,1,0",
+      "3,1,4,1,2,1,1,1,1,0",
+      "10,10,7,8,7,1,10,10,3,1",
+      "4,2,4,3,2,2,2,1,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "5,1,1,3,2,1,1,1,1,0",
+      "4,1,1,3,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "1,2,2,1,2,1,1,1,1,0",
+      "1,1,1,3,2,1,1,1,1,0",
+      "5,10,10,10,10,2,10,10,10,1",
+      "3,1,1,1,2,1,2,1,1,0",
+      "3,1,1,2,3,4,1,1,1,0",
+      "1,2,1,3,2,1,2,1,1,0",
+      "5,1,1,1,2,1,2,2,1,0",
+      "4,1,1,1,2,1,2,1,1,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "3,1,1,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,2,1,1,0",
+      "5,4,5,1,8,1,3,6,1,0",
+      "7,8,8,7,3,10,7,2,3,1",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "4,1,1,1,2,1,3,1,1,0",
+      "1,1,3,1,2,1,2,1,1,0",
+      "1,1,3,1,2,1,2,1,1,0",
+      "3,1,1,3,2,1,2,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "5,2,2,2,2,1,1,1,2,0",
+      "3,1,1,1,2,1,3,1,1,0",
+      "5,7,4,1,6,1,7,10,3,1",
+      "5,10,10,8,5,5,7,10,1,1",
+      "3,10,7,8,5,8,7,4,1,1",
+      "3,2,1,2,2,1,3,1,1,0",
+      "2,1,1,1,2,1,3,1,1,0",
+      "5,3,2,1,3,1,1,1,1,0",
+      "1,1,1,1,2,1,2,1,1,0",
+      "4,1,4,1,2,1,1,1,1,0",
+      "1,1,2,1,2,1,2,1,1,0",
+      "5,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "10,10,10,10,5,10,10,10,7,1",
+      "5,10,10,10,4,10,5,6,3,1",
+      "5,1,1,1,2,1,3,2,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,3,1,0",
+      "4,1,1,1,2,1,1,1,1,0",
+      "1,1,1,1,2,1,1,1,8,0",
+      "1,1,1,3,2,1,1,1,1,0",
+      "5,10,10,5,4,5,4,4,1,1",
+      "3,1,1,1,2,1,1,1,1,0",
+      "3,1,1,1,2,1,2,1,2,0",
+      "3,1,1,1,3,2,1,1,1,0",
+      "2,1,1,1,2,1,1,1,1,0",
+      "5,10,10,3,7,3,8,10,2,1",
+      "4,8,6,4,3,4,10,6,1,1",
+      "4,8,8,5,4,5,10,4,1,1"
+  };
+
+  private Datasets() {}
+
+}

Added: mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptronTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptronTest.java?rev=1595684&view=auto
==============================================================================
--- mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptronTest.java (added)
+++ mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/RunMultilayerPerceptronTest.java Sun May 18 21:03:02 2014
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.mahout.classifier.mlp;
+
+import java.io.File;
+
+import org.apache.mahout.common.MahoutTestCase;
+import org.junit.Test;
+
+public class RunMultilayerPerceptronTest extends MahoutTestCase {
+  
+  @Test
+  public void runMultilayerPerceptron() throws Exception {
+    
+    // Train a model first
+    String modelFileName = "mlp.model";
+    File modelFile = getTestTempFile(modelFileName);
+
+    File irisDataset = getTestTempFile("iris.csv");
+    writeLines(irisDataset, Datasets.IRIS);
+
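+    // Short flags as this test reads them (see TrainMultilayerPerceptron in this patch):
+    // -i the input CSV, -sh skip the header row, -labels the class labels,
+    // -mo the path the model is written to, -u update the model file if it exists,
+    // -ls the layer sizes: 4 inputs, 8 hidden units, 3 outputs (one per species).
+    // A rough equivalent invocation, assuming a suitable classpath, would be:
+    //   java org.apache.mahout.classifier.mlp.TrainMultilayerPerceptron \
+    //     -i iris.csv -sh -labels setosa versicolor virginica -mo mlp.model -u -ls 4 8 3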
+    String[] argsTrain = {
+      "-i", irisDataset.getAbsolutePath(),
+      "-sh",
+      "-labels", "setosa", "versicolor", "virginica",
+      "-mo", modelFile.getAbsolutePath(), 
+      "-u",
+      "-ls", "4", "8", "3"
+    };
+    
+    TrainMultilayerPerceptron.main(argsTrain);
+    
+    assertTrue(modelFile.exists());
+    
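+    // Use the freshly trained model to label the same dataset.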
+    String outputFileName = "labelResult.txt";
+    File outputFile = getTestTempFile(outputFileName);
+    
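+    // -cr gives the range of feature columns, here columns 0 through 3 (the four
+    // iris measurements); -o is the file the predicted labels are written to.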
+    String[] argsLabeling = {
+        "-i", irisDataset.getAbsolutePath(),
+        "-sh",
+        "-cr", "0", "3",
+        "-mo", modelFile.getAbsolutePath(),
+        "-o", outputFile.getAbsolutePath()
+    };
+    
+    RunMultilayerPerceptron.main(argsLabeling);
+    
+    assertTrue(outputFile.exists());
+  }
+
+}

Modified: mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestMultilayerPerceptron.java
URL: http://svn.apache.org/viewvc/mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestMultilayerPerceptron.java?rev=1595684&r1=1595683&r2=1595684&view=diff
==============================================================================
--- mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestMultilayerPerceptron.java (original)
+++ mahout/trunk/mrlegacy/src/test/java/org/apache/mahout/classifier/mlp/TestMultilayerPerceptron.java Sun May 18 21:03:02 2014
@@ -85,4 +85,4 @@ public class TestMultilayerPerceptron ex
     }
     mlpCopy.close();
   }
-}
+}
\ No newline at end of file