You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@horn.apache.org by ed...@apache.org on 2015/10/29 05:27:08 UTC
[3/5] incubator-horn git commit: HORN-3: Import initial source code
from Hama ML package
http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/3779483e/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java
new file mode 100644
index 0000000..85c4b7a
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java
@@ -0,0 +1,642 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+import org.apache.horn.bsp.AbstractLayeredNeuralNetwork.LearningStyle;
+import org.apache.horn.bsp.AbstractLayeredNeuralNetwork.TrainingMethod;
+import org.apache.hama.ml.util.DefaultFeatureTransformer;
+import org.apache.hama.ml.util.FeatureTransformer;
+import org.junit.Test;
+import org.mortbay.log.Log;
+
+/**
+ * Test the functionality of SmallLayeredNeuralNetwork.
+ *
+ */
+public class TestSmallLayeredNeuralNetwork extends MLTestBase {
+
+ @Test
+ /**
+ * Verify that a fully-configured network (layers, cost function, learning
+ * hyper-parameters, weight matrices, learning style, feature transformer)
+ * survives a write-to-file / read-from-file round trip unchanged.
+ */
+ public void testReadWrite() {
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.addLayer(2, false,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.addLayer(5, false,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.addLayer(1, true,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ double learningRate = 0.2;
+ ann.setLearningRate(learningRate);
+ double momentumWeight = 0.5;
+ ann.setMomemtumWeight(momentumWeight);
+ double regularizationWeight = 0.05;
+ ann.setRegularizationWeight(regularizationWeight);
+ // intentionally initialize the weight matrices to fixed constants
+ // (0.2 and 0.8) so the round-tripped copy can be compared exactly
+ DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
+ matrices[0] = new DenseDoubleMatrix(5, 3, 0.2);
+ matrices[1] = new DenseDoubleMatrix(1, 6, 0.8);
+ ann.setWeightMatrices(matrices);
+ ann.setLearningStyle(LearningStyle.UNSUPERVISED);
+
+ FeatureTransformer defaultFeatureTransformer = new DefaultFeatureTransformer();
+ ann.setFeatureTransformer(defaultFeatureTransformer);
+
+
+ // write to file
+ String modelPath = "/tmp/testSmallLayeredNeuralNetworkReadWrite";
+ ann.setModelPath(modelPath);
+ try {
+ ann.writeModelToFile();
+ } catch (IOException e) {
+ // NOTE(review): a failed write is only printed, not failed on; the
+ // assertions below would then fail with a less obvious error
+ e.printStackTrace();
+ }
+
+ // read from file
+ SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+ assertEquals(annCopy.getClass().getSimpleName(), annCopy.getModelType());
+ assertEquals(modelPath, annCopy.getModelPath());
+ assertEquals(learningRate, annCopy.getLearningRate(), 0.000001);
+ assertEquals(momentumWeight, annCopy.getMomemtumWeight(), 0.000001);
+ assertEquals(regularizationWeight, annCopy.getRegularizationWeight(),
+ 0.000001);
+ assertEquals(TrainingMethod.GRADIENT_DESCENT, annCopy.getTrainingMethod());
+ assertEquals(LearningStyle.UNSUPERVISED, annCopy.getLearningStyle());
+
+ // compare weights element-wise between the original and the copy
+ DoubleMatrix[] weightsMatrices = annCopy.getWeightMatrices();
+ for (int i = 0; i < weightsMatrices.length; ++i) {
+ DoubleMatrix expectMat = matrices[i];
+ DoubleMatrix actualMat = weightsMatrices[i];
+ for (int j = 0; j < expectMat.getRowCount(); ++j) {
+ for (int k = 0; k < expectMat.getColumnCount(); ++k) {
+ assertEquals(expectMat.get(j, k), actualMat.get(j, k), 0.000001);
+ }
+ }
+ }
+
+ // the transformer is compared by class name, not by instance identity
+ FeatureTransformer copyTransformer = annCopy.getFeatureTransformer();
+ assertEquals(defaultFeatureTransformer.getClass().getName(), copyTransformer.getClass().getName());
+ }
+
+ @Test
+ /**
+ * Test the forward functionality.
+ *
+ * Builds three fixed-weight networks and checks the output of a single
+ * forward pass; the expected values are hand-computed for all-0.5 weights.
+ */
+ public void testOutput() {
+ // first network: identity activations, checks only output dimension
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.addLayer(2, false,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.addLayer(5, false,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.addLayer(1, true,
+ FunctionFactory.createDoubleFunction("IdentityFunction"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann.setLearningRate(0.1);
+ // intentionally initialize all weights to 0.5
+ DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
+ matrices[0] = new DenseDoubleMatrix(5, 3, 0.5);
+ matrices[1] = new DenseDoubleMatrix(1, 6, 0.5);
+ ann.setWeightMatrices(matrices);
+
+ double[] arr = new double[] { 0, 1 };
+ DoubleVector training = new DenseDoubleVector(arr);
+ DoubleVector result = ann.getOutput(training);
+ assertEquals(1, result.getDimension());
+ // NOTE(review): exact-value assertion is disabled; only the dimension
+ // of the output is verified for the identity network
+ // assertEquals(3, result.get(0), 0.000001);
+
+ // second network: sigmoid activations, exact output value checked
+ SmallLayeredNeuralNetwork ann2 = new SmallLayeredNeuralNetwork();
+ ann2.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann2.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann2.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann2.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann2.setLearningRate(0.3);
+ // intentionally initialize all weights to 0.5
+ DoubleMatrix[] matrices2 = new DenseDoubleMatrix[2];
+ matrices2[0] = new DenseDoubleMatrix(3, 3, 0.5);
+ matrices2[1] = new DenseDoubleMatrix(1, 4, 0.5);
+ ann2.setWeightMatrices(matrices2);
+
+ double[] test = { 0, 0 };
+ // expected forward-pass output for input {0, 0} with all-0.5 weights
+ double[] result2 = { 0.807476 };
+
+ DoubleVector vec = ann2.getOutput(new DenseDoubleVector(test));
+ assertArrayEquals(result2, vec.toArray(), 0.000001);
+
+ // third network: same topology/weights as the second, different input
+ SmallLayeredNeuralNetwork ann3 = new SmallLayeredNeuralNetwork();
+ ann3.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann3.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann3.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann3.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann3.setLearningRate(0.3);
+ // intentionally initialize all weights to 0.5
+ DoubleMatrix[] initMatrices = new DenseDoubleMatrix[2];
+ initMatrices[0] = new DenseDoubleMatrix(3, 3, 0.5);
+ initMatrices[1] = new DenseDoubleMatrix(1, 4, 0.5);
+ ann3.setWeightMatrices(initMatrices);
+
+ double[] instance = { 0, 1 };
+ DoubleVector output = ann3.getOutput(new DenseDoubleVector(instance));
+ assertEquals(0.8315410, output.get(0), 0.000001);
+ }
+
+ @Test
+ /**
+ * Train a small Sigmoid network on the XOR truth table with plain gradient
+ * descent (no momentum), then verify both the in-memory model and a
+ * write/read round-tripped copy against every XOR instance.
+ */
+ public void testXORlocal() {
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann.setLearningRate(0.5);
+ ann.setMomemtumWeight(0.0);
+
+ int iterations = 50000; // iteration should be set to a very large number
+ // each row is {input1, input2, expectedOutput}
+ double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+ for (int i = 0; i < iterations; ++i) {
+ DoubleMatrix[] matrices = null;
+ for (int j = 0; j < instances.length; ++j) {
+ // j is already bounded by instances.length, so no modulo is needed
+ matrices = ann.trainByInstance(new DenseDoubleVector(instances[j]));
+ ann.updateWeightMatrices(matrices);
+ }
+ }
+
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = ann.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+
+ // write model into file and read out
+ String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocal";
+ ann.setModelPath(modelPath);
+ try {
+ ann.writeModelToFile();
+ } catch (IOException e) {
+ // NOTE(review): a failed write is only printed, not failed on
+ e.printStackTrace();
+ }
+ SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+ // test on instances using the deserialized copy of the model
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = annCopy.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+ }
+
+ @Test
+ /**
+ * Train a small Sigmoid network on the XOR truth table using online
+ * training with momentum (0.3), then verify both the in-memory model and
+ * a write/read round-tripped copy against every XOR instance.
+ */
+ public void testXORWithMomentum() {
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann.setLearningRate(0.6);
+ ann.setMomemtumWeight(0.3);
+
+ int iterations = 2000; // iteration should be set to a very large number
+ // each row is {input1, input2, expectedOutput}
+ double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+ for (int i = 0; i < iterations; ++i) {
+ for (int j = 0; j < instances.length; ++j) {
+ // j is already bounded by instances.length, so no modulo is needed
+ ann.trainOnline(new DenseDoubleVector(instances[j]));
+ }
+ }
+
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = ann.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+
+ // write model into file and read out
+ String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithMomentum";
+ ann.setModelPath(modelPath);
+ try {
+ ann.writeModelToFile();
+ } catch (IOException e) {
+ // NOTE(review): a failed write is only printed, not failed on
+ e.printStackTrace();
+ }
+ SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+ // test on instances using the deserialized copy of the model
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = annCopy.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+ }
+
+ @Test
+ /**
+ * Train a small Sigmoid network on the XOR truth table using online
+ * training with momentum (0.5) and L-regularization (weight 0.002), then
+ * verify both the in-memory model and a write/read round-tripped copy.
+ */
+ public void testXORLocalWithRegularization() {
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("SquaredError"));
+ ann.setLearningRate(0.7);
+ ann.setMomemtumWeight(0.5);
+ ann.setRegularizationWeight(0.002);
+
+ int iterations = 5000; // iteration should be set to a very large number
+ // each row is {input1, input2, expectedOutput}
+ double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+ for (int i = 0; i < iterations; ++i) {
+ for (int j = 0; j < instances.length; ++j) {
+ // j is already bounded by instances.length, so no modulo is needed
+ ann.trainOnline(new DenseDoubleVector(instances[j]));
+ }
+ }
+
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = ann.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+
+ // write model into file and read out
+ String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithRegularization";
+ ann.setModelPath(modelPath);
+ try {
+ ann.writeModelToFile();
+ } catch (IOException e) {
+ // NOTE(review): a failed write is only printed, not failed on
+ e.printStackTrace();
+ }
+ SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+ // test on instances using the deserialized copy of the model
+ for (int i = 0; i < instances.length; ++i) {
+ DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+ // the expected output is the last element in array
+ double result = instances[i][2];
+ double actual = annCopy.getOutput(input).get(0);
+ if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+ Log.info("Neural network fails to learn the XOR.");
+ }
+ }
+ }
+
+ @Test
+ /**
+ * Train a deep Sigmoid network locally on the logistic-regression dataset
+ * (CSV, last column is the 0/1 label), holding out the final 100 rows for
+ * evaluation, and log the relative error on the held-out set.
+ *
+ * NOTE(review): this test logs the error rate but asserts nothing about
+ * it, so it cannot fail on poor accuracy — consider asserting a bound.
+ */
+ public void testTwoClassClassification() {
+ // use logistic regression data
+ String filepath = "src/test/resources/logistic_regression_data.txt";
+ List<double[]> instanceList = new ArrayList<double[]>();
+
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(filepath));
+ String line = null;
+ while ((line = br.readLine()) != null) {
+ String[] tokens = line.trim().split(",");
+ double[] instance = new double[tokens.length];
+ for (int i = 0; i < tokens.length; ++i) {
+ instance[i] = Double.parseDouble(tokens[i]);
+ }
+ instanceList.add(instance);
+ }
+ br.close();
+ } catch (FileNotFoundException e) {
+ // NOTE(review): read failures are only printed; the code below would
+ // then throw on the empty instanceList
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ // scale every feature column (all but the label) into [0, 1]
+ zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+
+ int dimension = instanceList.get(0).length - 1;
+
+ // divide dataset into training and testing: last 100 rows are held out
+ List<double[]> testInstances = new ArrayList<double[]>();
+ testInstances.addAll(instanceList.subList(instanceList.size() - 100,
+ instanceList.size()));
+ List<double[]> trainingInstances = instanceList.subList(0,
+ instanceList.size() - 100);
+
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.setLearningRate(0.001);
+ ann.setMomemtumWeight(0.1);
+ ann.setRegularizationWeight(0.01);
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("CrossEntropy"));
+
+ long start = new Date().getTime();
+ int iterations = 1000;
+ for (int i = 0; i < iterations; ++i) {
+ for (double[] trainingInstance : trainingInstances) {
+ ann.trainOnline(new DenseDoubleVector(trainingInstance));
+ }
+ }
+ long end = new Date().getTime();
+ Log.info(String.format("Training time: %fs\n",
+ (double) (end - start) / 1000));
+
+ double errorRate = 0;
+ // calculate the error on test instance
+ for (double[] testInstance : testInstances) {
+ DoubleVector instance = new DenseDoubleVector(testInstance);
+ double expected = instance.get(instance.getDimension() - 1);
+ // strip the label off the end before feeding the feature vector in
+ instance = instance.slice(instance.getDimension() - 1);
+ double actual = ann.getOutput(instance).get(0);
+ if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+ ++errorRate;
+ }
+ }
+ errorRate /= testInstances.size();
+
+ Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+ }
+
+ @Test
+ /**
+ * Driver that runs the two distributed logistic-regression scenarios
+ * (plain, then with a feature transformer) in sequence; each scenario
+ * uses its own /tmp dataset and model path.
+ */
+ public void testLogisticRegression() {
+ this.testLogisticRegressionDistributedVersion();
+ this.testLogisticRegressionDistributedVersionWithFeatureTransformer();
+ }
+
+ /**
+ * Distributed-training scenario: loads the logistic-regression CSV,
+ * normalizes and shuffles it, writes the training split to a temporary
+ * SequenceFile, trains via ann.train(...) with BSP parameters, and logs
+ * the relative error on the 150-row held-out split. Not annotated with
+ * @Test; invoked from testLogisticRegression().
+ */
+ public void testLogisticRegressionDistributedVersion() {
+ // write data into a sequence file
+ String tmpStrDatasetPath = "/tmp/logistic_regression_data";
+ Path tmpDatasetPath = new Path(tmpStrDatasetPath);
+ String strDataPath = "src/test/resources/logistic_regression_data.txt";
+ String modelPath = "/tmp/logistic-regression-distributed-model";
+
+ Configuration conf = new Configuration();
+ List<double[]> instanceList = new ArrayList<double[]>();
+ List<double[]> trainingInstances = null;
+ List<double[]> testInstances = null;
+
+ try {
+ FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
+ fs.delete(tmpDatasetPath, true);
+ // NOTE(review): after delete() the path should no longer exist, so
+ // this exists()-guarded createNewFile looks unreachable — possibly an
+ // inverted condition; confirm the original intent
+ if (fs.exists(tmpDatasetPath)) {
+ fs.createNewFile(tmpDatasetPath);
+ }
+
+ BufferedReader br = new BufferedReader(new FileReader(strDataPath));
+ String line = null;
+ int count = 0;
+ while ((line = br.readLine()) != null) {
+ String[] tokens = line.trim().split(",");
+ double[] instance = new double[tokens.length];
+ for (int i = 0; i < tokens.length; ++i) {
+ instance[i] = Double.parseDouble(tokens[i]);
+ }
+ instanceList.add(instance);
+ }
+ br.close();
+
+ // scale every feature column (all but the label) into [0, 1]
+ zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+
+ // write training data to temporal sequence file
+ SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+ tmpDatasetPath, LongWritable.class, VectorWritable.class);
+ int testSize = 150;
+
+ Collections.shuffle(instanceList);
+ testInstances = new ArrayList<double[]>();
+ testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
+ instanceList.size()));
+ trainingInstances = instanceList.subList(0, instanceList.size()
+ - testSize);
+
+ // keys are just a running record counter
+ for (double[] instance : trainingInstances) {
+ DoubleVector vec = new DenseDoubleVector(instance);
+ writer.append(new LongWritable(count++), new VectorWritable(vec));
+ }
+ writer.close();
+ } catch (FileNotFoundException e) {
+ // NOTE(review): failures here are only printed; testInstances then
+ // stays null and the evaluation loop below throws NPE
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ }
+
+ // create model; dimension is hard-coded to match the dataset's
+ // 8 feature columns
+ int dimension = 8;
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.setLearningRate(0.7);
+ ann.setMomemtumWeight(0.5);
+ ann.setRegularizationWeight(0.1);
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("CrossEntropy"));
+ ann.setModelPath(modelPath);
+
+ long start = new Date().getTime();
+ Map<String, String> trainingParameters = new HashMap<String, String>();
+ trainingParameters.put("tasks", "5");
+ trainingParameters.put("training.max.iterations", "2000");
+ trainingParameters.put("training.batch.size", "300");
+ trainingParameters.put("convergence.check.interval", "1000");
+ ann.train(tmpDatasetPath, trainingParameters);
+
+ long end = new Date().getTime();
+
+ // validate results
+ double errorRate = 0;
+ // calculate the error on test instance
+ for (double[] testInstance : testInstances) {
+ DoubleVector instance = new DenseDoubleVector(testInstance);
+ double expected = instance.get(instance.getDimension() - 1);
+ // strip the label off the end before feeding the feature vector in
+ instance = instance.slice(instance.getDimension() - 1);
+ double actual = ann.getOutput(instance).get(0);
+ if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+ ++errorRate;
+ }
+ }
+ errorRate /= testInstances.size();
+
+ Log.info(String.format("Training time: %fs\n",
+ (double) (end - start) / 1000));
+ Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+ }
+
+ /**
+ * Same distributed-training scenario as testLogisticRegressionDistributedVersion,
+ * but with a DefaultFeatureTransformer installed on the model; the same
+ * transformer is applied to each test vector before evaluation. Not
+ * annotated with @Test; invoked from testLogisticRegression().
+ */
+ public void testLogisticRegressionDistributedVersionWithFeatureTransformer() {
+ // write data into a sequence file
+ String tmpStrDatasetPath = "/tmp/logistic_regression_data_feature_transformer";
+ Path tmpDatasetPath = new Path(tmpStrDatasetPath);
+ String strDataPath = "src/test/resources/logistic_regression_data.txt";
+ String modelPath = "/tmp/logistic-regression-distributed-model-feature-transformer";
+
+ Configuration conf = new Configuration();
+ List<double[]> instanceList = new ArrayList<double[]>();
+ List<double[]> trainingInstances = null;
+ List<double[]> testInstances = null;
+
+ try {
+ FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
+ fs.delete(tmpDatasetPath, true);
+ // NOTE(review): after delete() the path should no longer exist, so
+ // this exists()-guarded createNewFile looks unreachable — possibly an
+ // inverted condition; confirm the original intent
+ if (fs.exists(tmpDatasetPath)) {
+ fs.createNewFile(tmpDatasetPath);
+ }
+
+ BufferedReader br = new BufferedReader(new FileReader(strDataPath));
+ String line = null;
+ int count = 0;
+ while ((line = br.readLine()) != null) {
+ String[] tokens = line.trim().split(",");
+ double[] instance = new double[tokens.length];
+ for (int i = 0; i < tokens.length; ++i) {
+ instance[i] = Double.parseDouble(tokens[i]);
+ }
+ instanceList.add(instance);
+ }
+ br.close();
+
+ // scale every feature column (all but the label) into [0, 1]
+ zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+
+ // write training data to temporal sequence file
+ SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+ tmpDatasetPath, LongWritable.class, VectorWritable.class);
+ int testSize = 150;
+
+ Collections.shuffle(instanceList);
+ testInstances = new ArrayList<double[]>();
+ testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
+ instanceList.size()));
+ trainingInstances = instanceList.subList(0, instanceList.size()
+ - testSize);
+
+ // keys are just a running record counter
+ for (double[] instance : trainingInstances) {
+ DoubleVector vec = new DenseDoubleVector(instance);
+ writer.append(new LongWritable(count++), new VectorWritable(vec));
+ }
+ writer.close();
+ } catch (FileNotFoundException e) {
+ // NOTE(review): failures here are only printed; testInstances then
+ // stays null and the evaluation loop below throws NPE
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ }
+
+ // create model; dimension is hard-coded to match the dataset's
+ // 8 feature columns
+ int dimension = 8;
+ SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ ann.setLearningRate(0.7);
+ ann.setMomemtumWeight(0.5);
+ ann.setRegularizationWeight(0.1);
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(dimension, false,
+ FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ ann.setCostFunction(FunctionFactory
+ .createDoubleDoubleFunction("CrossEntropy"));
+ ann.setModelPath(modelPath);
+
+ // install the transformer under test on the model
+ FeatureTransformer featureTransformer = new DefaultFeatureTransformer();
+
+ ann.setFeatureTransformer(featureTransformer);
+
+ long start = new Date().getTime();
+ Map<String, String> trainingParameters = new HashMap<String, String>();
+ trainingParameters.put("tasks", "5");
+ trainingParameters.put("training.max.iterations", "2000");
+ trainingParameters.put("training.batch.size", "300");
+ trainingParameters.put("convergence.check.interval", "1000");
+ ann.train(tmpDatasetPath, trainingParameters);
+
+
+ long end = new Date().getTime();
+
+ // validate results
+ double errorRate = 0;
+ // calculate the error on test instance
+ for (double[] testInstance : testInstances) {
+ DoubleVector instance = new DenseDoubleVector(testInstance);
+ double expected = instance.get(instance.getDimension() - 1);
+ // strip the label off the end before feeding the feature vector in
+ instance = instance.slice(instance.getDimension() - 1);
+ // apply the same transformation used during training before inference
+ instance = featureTransformer.transform(instance);
+ double actual = ann.getOutput(instance).get(0);
+ if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+ ++errorRate;
+ }
+ }
+ errorRate /= testInstances.size();
+
+ Log.info(String.format("Training time: %fs\n",
+ (double) (end - start) / 1000));
+ Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/3779483e/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java
new file mode 100644
index 0000000..e422d95
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.junit.Test;
+
+/**
+ * Test the functionalities of SmallLayeredNeuralNetworkMessage.
+ *
+ */
+public class TestSmallLayeredNeuralNetworkMessage {
+
+ @Test
+ /**
+ * Serialize a message carrying only current weight matrices (prev == null)
+ * to a temporary HDFS-API file, read it back, and verify the error value,
+ * convergence flag, matrix contents, and that prev matrices stay null.
+ */
+ public void testReadWriteWithoutPrev() {
+ double error = 0.22;
+ double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
+ { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
+ double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
+ DoubleMatrix[] matrices = new DoubleMatrix[2];
+ matrices[0] = new DenseDoubleMatrix(matrix1);
+ matrices[1] = new DenseDoubleMatrix(matrix2);
+
+ boolean isConverge = false;
+
+ SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
+ error, isConverge, matrices, null);
+ Configuration conf = new Configuration();
+ String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessage";
+ Path path = new Path(strPath);
+ try {
+ FileSystem fs = FileSystem.get(new URI(strPath), conf);
+ FSDataOutputStream out = fs.create(path);
+ message.write(out);
+ out.close();
+
+ // deserialize into a fresh, empty message and compare field by field
+ FSDataInputStream in = fs.open(path);
+ SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
+ 0, isConverge, null, null);
+ readMessage.readFields(in);
+ in.close();
+ assertEquals(error, readMessage.getTrainingError(), 0.000001);
+ assertFalse(readMessage.isConverge());
+ DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
+ assertEquals(2, readMatrices.length);
+ for (int i = 0; i < readMatrices.length; ++i) {
+ double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
+ .getValues();
+ double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
+ .getValues();
+ for (int r = 0; r < doubleMatrices.length; ++r) {
+ assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+ }
+ }
+
+ // a null prev-matrices array must round-trip as null
+ DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
+ assertNull(readPrevMatrices);
+
+ // delete
+ fs.delete(path, true);
+ } catch (IOException e) {
+ // NOTE(review): I/O failures are only printed, not failed on
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Test
+ /**
+ * Serialize a message carrying both current and previous weight matrices
+ * to a temporary HDFS-API file, read it back, and verify the convergence
+ * flag plus element-wise equality of both matrix arrays.
+ */
+ public void testReadWriteWithPrev() {
+ double error = 0.22;
+ boolean isConverge = true;
+
+ double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
+ { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
+ double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
+ DoubleMatrix[] matrices = new DoubleMatrix[2];
+ matrices[0] = new DenseDoubleMatrix(matrix1);
+ matrices[1] = new DenseDoubleMatrix(matrix2);
+
+ double[][] prevMatrix1 = new double[][] { { 0.1, 0.1, 0.2, 0.3 },
+ { 0.2, 0.4, 0.1, 0.5 }, { 0.5, 0.1, 0.5, 0.2 } };
+ double[][] prevMatrix2 = new double[][] { { 0.1, 0.2, 0.5, 0.9 },
+ { 0.3, 0.5, 0.2, 0.6 }, { 0.6, 0.8, 0.7, 0.5 } };
+
+ DoubleMatrix[] prevMatrices = new DoubleMatrix[2];
+ prevMatrices[0] = new DenseDoubleMatrix(prevMatrix1);
+ prevMatrices[1] = new DenseDoubleMatrix(prevMatrix2);
+
+ SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
+ error, isConverge, matrices, prevMatrices);
+ Configuration conf = new Configuration();
+ String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessageWithPrev";
+ Path path = new Path(strPath);
+ try {
+ FileSystem fs = FileSystem.get(new URI(strPath), conf);
+ FSDataOutputStream out = fs.create(path);
+ message.write(out);
+ out.close();
+
+ // deserialize into a fresh, empty message and compare field by field
+ FSDataInputStream in = fs.open(path);
+ SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
+ 0, isConverge, null, null);
+ readMessage.readFields(in);
+ in.close();
+
+ assertTrue(readMessage.isConverge());
+
+ DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
+ assertEquals(2, readMatrices.length);
+ for (int i = 0; i < readMatrices.length; ++i) {
+ double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
+ .getValues();
+ double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
+ .getValues();
+ for (int r = 0; r < doubleMatrices.length; ++r) {
+ assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+ }
+ }
+
+ DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
+ assertEquals(2, readPrevMatrices.length);
+ for (int i = 0; i < readPrevMatrices.length; ++i) {
+ double[][] doubleMatrices = ((DenseDoubleMatrix) readPrevMatrices[i])
+ .getValues();
+ double[][] doubleExpected = ((DenseDoubleMatrix) prevMatrices[i])
+ .getValues();
+ for (int r = 0; r < doubleMatrices.length; ++r) {
+ assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+ }
+ }
+
+ // delete
+ fs.delete(path, true);
+ } catch (IOException e) {
+ // NOTE(review): I/O failures are only printed, not failed on
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/3779483e/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java b/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java
new file mode 100644
index 0000000..462140c
--- /dev/null
+++ b/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleVector;
+
+/**
+ * Test the functionality of NeuralNetwork Example.
+ *
+ */
+public class NeuralNetworkTest extends TestCase {
+ private Configuration conf = new HamaConfiguration();
+ private FileSystem fs;
+ private String MODEL_PATH = "/tmp/neuralnets.model";
+ private String RESULT_PATH = "/tmp/neuralnets.txt";
+ private String SEQTRAIN_DATA = "/tmp/test-neuralnets.data";
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ fs = FileSystem.get(conf);
+ }
+
+ public void testNeuralnetsLabeling() throws IOException {
+ this.neuralNetworkTraining();
+
+ String dataPath = "src/test/resources/neuralnets_classification_test.txt";
+ String mode = "label";
+ try {
+ NeuralNetwork
+ .main(new String[] { mode, dataPath, RESULT_PATH, MODEL_PATH });
+
+ // compare results with ground-truth
+ BufferedReader groundTruthReader = new BufferedReader(new FileReader(
+ "src/test/resources/neuralnets_classification_label.txt"));
+ List<Double> groundTruthList = new ArrayList<Double>();
+ String line = null;
+ while ((line = groundTruthReader.readLine()) != null) {
+ groundTruthList.add(Double.parseDouble(line));
+ }
+ groundTruthReader.close();
+
+ BufferedReader resultReader = new BufferedReader(new FileReader(
+ RESULT_PATH));
+ List<Double> resultList = new ArrayList<Double>();
+ while ((line = resultReader.readLine()) != null) {
+ resultList.add(Double.parseDouble(line));
+ }
+ resultReader.close();
+ int total = resultList.size();
+ double correct = 0;
+ for (int i = 0; i < groundTruthList.size(); ++i) {
+ double actual = resultList.get(i);
+ double expected = groundTruthList.get(i);
+ if (actual < 0.5 && expected < 0.5 || actual >= 0.5 && expected >= 0.5) {
+ ++correct;
+ }
+ }
+ System.out.printf("Precision: %f\n", correct / total);
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ fs.delete(new Path(RESULT_PATH), true);
+ fs.delete(new Path(MODEL_PATH), true);
+ fs.delete(new Path(SEQTRAIN_DATA), true);
+ }
+ }
+
+ private void neuralNetworkTraining() {
+ String mode = "train";
+ String strTrainingDataPath = "src/test/resources/neuralnets_classification_training.txt";
+ int featureDimension = 8;
+ int labelDimension = 1;
+
+ Path sequenceTrainingDataPath = new Path(SEQTRAIN_DATA);
+ Configuration conf = new Configuration();
+ FileSystem fs;
+ try {
+ fs = FileSystem.get(conf);
+ SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+ sequenceTrainingDataPath, LongWritable.class, VectorWritable.class);
+ BufferedReader br = new BufferedReader(
+ new FileReader(strTrainingDataPath));
+ String line = null;
+ // convert the data in sequence file format
+ while ((line = br.readLine()) != null) {
+ String[] tokens = line.split(",");
+ double[] vals = new double[tokens.length];
+ for (int i = 0; i < tokens.length; ++i) {
+ vals[i] = Double.parseDouble(tokens[i]);
+ }
+ writer.append(new LongWritable(), new VectorWritable(
+ new DenseDoubleVector(vals)));
+ }
+ writer.close();
+ br.close();
+ } catch (IOException e1) {
+ e1.printStackTrace();
+ }
+
+ try {
+ NeuralNetwork.main(new String[] { mode, SEQTRAIN_DATA,
+ MODEL_PATH, "" + featureDimension, "" + labelDimension });
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/3779483e/src/test/resources/dimensional_reduction.txt
----------------------------------------------------------------------
diff --git a/src/test/resources/dimensional_reduction.txt b/src/test/resources/dimensional_reduction.txt
new file mode 100644
index 0000000..0528d42
--- /dev/null
+++ b/src/test/resources/dimensional_reduction.txt
@@ -0,0 +1,1600 @@
+-5.214647 7.087974 6.729247
+-0.421762 8.426638 7.896089
+-6.134601 5.686436 6.088567
+6.212569 8.307839 2.362335
+6.344603 7.664955 0.532386
+0.098673 6.992156 7.840777
+2.261138 8.088781 7.203263
+6.319323 6.770813 1.717501
+3.039391 6.589591 6.782867
+5.188841 7.283918 -2.624659
+-2.593684 7.969614 7.754495
+0.757685 7.698148 7.719040
+-2.493988 7.895625 7.774168
+-6.348292 9.235869 5.912167
+5.811158 8.661124 3.604759
+5.084188 9.415847 4.881474
+3.233400 10.133548 6.658328
+-2.671727 5.834501 7.738187
+-5.586962 6.801060 6.491443
+-0.300033 7.474045 7.886303
+2.959122 8.991690 6.831929
+-6.183061 8.313371 6.049571
+4.871536 9.088393 5.161636
+-7.851500 8.035905 4.251177
+6.314192 6.764987 0.220085
+4.037797 7.132615 6.039136
+4.866675 7.269669 5.167711
+2.389975 6.966941 -4.607930
+6.036700 7.196553 3.010703
+3.487451 7.463430 6.481716
+5.128315 6.459363 4.819489
+-3.312319 8.552966 7.573618
+5.381252 7.880799 4.433421
+-2.296213 7.214618 7.809371
+4.936740 7.142252 5.078796
+6.356512 7.265930 0.749298
+-5.989940 7.693886 6.201601
+-0.330259 8.197873 7.888911
+6.353076 7.452645 1.306126
+0.739180 7.147566 7.723272
+2.828880 6.862924 6.908594
+4.856910 6.987142 5.179873
+5.657584 8.035364 3.933724
+-1.239718 8.616024 7.912864
+-5.292073 8.727537 6.682023
+4.630170 6.844327 5.447683
+-7.776487 6.028616 4.357342
+0.254217 8.814937 7.817341
+-5.268034 7.535614 6.696807
+5.380876 8.718970 4.434040
+-1.523348 8.209247 7.898946
+4.738628 8.587521 -3.159172
+-9.342977 7.790923 -2.589265
+3.885419 6.894194 6.170687
+-2.898182 3.924822 7.686316
+-6.854335 6.854464 5.444460
+-3.369974 7.406866 7.556063
+6.350405 6.123195 0.621545
+4.233985 6.918638 5.858301
+4.322963 6.955515 5.771680
+-0.239685 7.946362 7.880742
+1.511782 7.418960 7.504429
+2.047813 9.715201 7.298597
+-9.452372 8.323733 -0.308520
+-9.453994 6.873090 -0.331165
+-9.253227 6.825797 -3.051144
+-6.102497 8.076519 6.114084
+6.258758 7.557662 2.129656
+4.954859 8.240420 -2.919076
+6.050266 8.209733 2.968663
+-2.841788 6.648304 7.699870
+3.024167 7.917924 6.792281
+3.114077 8.100438 6.735939
+3.632127 6.326400 6.373764
+-6.966740 7.420432 5.329874
+-5.306197 7.406685 6.673285
+3.354050 4.534522 6.576440
+-1.342129 6.924848 7.909008
+-8.440894 8.444134 3.276504
+3.953765 5.767462 6.112599
+-0.353391 7.464577 -4.622600
+-5.600283 7.198775 6.482420
+-1.138683 6.957302 -4.305837
+1.287620 5.885861 7.577024
+3.140525 7.719961 6.719019
+4.439921 8.120649 5.653091
+5.843908 8.567449 3.528124
+-1.999807 7.143920 7.852706
+-6.998283 8.320347 5.296949
+-1.641686 8.000756 7.890141
+-0.637122 7.255327 7.908746
+3.545277 8.370231 6.439237
+-5.057813 8.143913 6.821470
+-1.788540 7.325451 7.876755
+3.231971 5.134089 -4.275558
+0.014418 9.135857 7.852132
+-1.049889 8.435574 7.916518
+5.229370 8.210316 4.671893
+2.998831 7.019181 6.807834
+5.822185 7.044617 3.579246
+3.502988 8.529447 6.470388
+3.556294 8.944615 6.431043
+0.275645 7.626098 7.813860
+6.350581 8.483290 0.624596
+-1.029876 9.017951 7.916638
+-1.236492 8.501062 7.912964
+6.214590 8.318062 2.353006
+2.357908 5.884950 7.157349
+4.657961 8.877248 5.416277
+-3.966537 6.800191 7.346643
+1.655427 7.619326 7.453822
+-3.261750 7.149449 7.588636
+5.816712 7.414349 -1.557348
+1.032510 5.810382 7.650472
+6.294199 6.563924 0.071914
+1.120549 7.415623 7.626208
+5.858183 8.279860 3.493880
+-2.267918 8.699031 7.813994
+6.141022 6.650649 2.658369
+2.042799 7.661015 7.300742
+1.572004 8.557056 7.483607
+4.002859 7.401975 6.069959
+-8.902572 8.193579 2.234928
+-6.757595 6.495666 5.539775
+5.814173 6.947760 3.597812
+3.928729 8.669335 -3.858716
+2.746337 5.551667 -4.487923
+6.222903 7.098401 2.313919
+3.296003 7.563959 -4.243062
+4.094151 5.513191 5.988561
+-2.383802 8.495619 7.794405
+-8.119292 7.726085 3.842728
+-1.721302 8.385161 7.883222
+5.840237 7.248265 3.536846
+6.045281 8.429626 -0.974619
+-6.645581 7.511995 5.646504
+3.676010 6.462470 6.339899
+-6.524017 6.610462 -7.983916
+3.022285 8.377986 6.793441
+5.535313 8.150169 4.167033
+5.417565 7.607157 4.373057
+-7.903988 7.482833 4.174878
+5.886684 7.312755 3.423871
+0.275682 6.865824 7.813853
+-9.015928 8.891424 -3.931973
+-1.964493 8.183805 7.857120
+0.868877 6.122895 7.692594
+-6.501610 5.924574 5.778242
+-4.445926 8.431146 7.139840
+3.630562 7.560270 6.374962
+-0.724959 7.132482 7.912209
+3.984812 8.029555 6.085724
+3.112338 5.620074 6.737046
+6.158734 7.283831 2.590392
+3.505755 8.124096 6.468364
+3.901459 5.420980 6.157185
+-3.650869 7.628908 7.463849
+3.777994 7.352483 6.259097
+2.904796 6.157412 6.864344
+-7.607583 6.222248 4.584928
+-4.892777 9.297294 6.913692
+6.361003 6.435683 0.991536
+-8.797081 8.179497 -4.544607
+-8.796629 8.225805 2.507647
+6.359129 9.044684 1.142701
+1.193283 6.636904 7.605304
+-0.747523 6.669483 7.912940
+1.997140 5.531188 7.320083
+-0.352661 6.222056 7.890768
+5.849521 6.999574 3.514722
+-1.104270 7.883201 7.915935
+2.862075 8.044723 6.889394
+-2.952922 6.923279 7.672752
+-4.735157 6.955731 6.997307
+-0.181899 6.131445 7.874975
+1.418946 8.296576 7.535425
+-8.385729 8.203527 3.380724
+0.424236 7.739502 7.788016
+1.542411 8.581142 7.493910
+3.148428 7.671558 6.713933
+-8.159330 9.583673 3.777221
+5.351837 7.865185 4.481318
+0.300457 9.028326 7.809751
+-7.052613 8.727547 5.239419
+5.823358 10.398561 -1.542658
+6.346598 4.926079 1.416869
+-1.320844 6.475045 7.909918
+-2.261126 9.082600 7.815089
+1.895567 9.200700 7.361846
+-1.584108 7.255155 7.894646
+5.644996 4.143754 -1.904898
+4.966373 7.129021 5.040286
+6.346146 8.891149 1.423544
+5.942655 7.292993 3.279345
+5.224576 9.170574 -2.575991
+0.711951 8.680131 7.729412
+4.371493 6.613097 5.723145
+-2.321962 5.922016 7.805074
+5.039264 8.445435 -2.817415
+6.266070 8.601982 2.088217
+-3.598651 7.143981 7.481840
+-0.668148 5.607563 7.910081
+6.245001 7.990574 -0.215363
+-3.184933 8.201757 7.610774
+-7.744719 7.206444 -6.521306
+-3.850260 6.913175 7.391520
+-4.029040 6.273264 7.321684
+4.252708 10.519454 5.840322
+-9.466489 9.109256 -0.542468
+6.117972 7.595596 2.742709
+-9.173549 7.484529 1.374899
+-0.022494 6.172525 7.856812
+-5.760021 6.991215 6.371314
+6.244617 6.520502 -0.217323
+1.607393 6.167728 7.471106
+6.044820 8.261442 2.985655
+2.244340 8.182271 7.211061
+4.557560 7.204057 5.528005
+-8.086230 6.975473 3.895884
+-2.121861 7.079526 7.836223
+-7.691109 6.849068 -6.598656
+-6.820449 7.711009 5.478187
+0.184109 8.427748 7.828303
+1.907641 7.339092 7.356973
+3.424941 7.709557 -4.174253
+-6.153544 7.248713 6.073392
+3.524789 7.606886 6.454387
+3.539394 7.409512 6.443599
+0.548160 9.020287 7.764171
+2.533926 7.235065 7.069406
+-2.419615 7.439628 7.788000
+0.102776 7.542637 -4.732985
+6.291273 6.933579 1.931287
+4.855127 6.289525 5.182087
+-6.765639 5.535820 5.531962
+-1.100403 6.350964 7.915989
+-9.473760 7.699958 -1.253486
+5.680254 7.902587 -1.838043
+-0.335856 6.952193 7.889381
+4.575161 7.371058 5.508760
+2.505241 10.157312 -4.572280
+0.369977 8.331159 7.797799
+-8.453016 5.951523 3.253151
+5.805336 9.182754 3.618115
+1.730073 9.662341 7.426234
+4.997530 6.370577 4.999191
+-3.249081 7.502106 7.592343
+-9.477245 7.115397 -1.025054
+5.750761 5.547668 -1.697717
+-2.605591 6.960813 7.752059
+5.786660 7.811105 3.660435
+-7.248153 6.970221 5.023369
+-9.477241 7.067643 -0.962881
+4.286932 7.717918 5.807119
+2.131911 7.675562 -4.677103
+5.866604 6.504499 3.473428
+-9.321725 8.247840 0.710182
+4.522854 8.831682 5.565542
+1.824403 8.171250 7.390083
+3.895328 7.311882 6.162355
+0.500799 7.841007 7.773532
+6.360553 6.134380 1.063968
+-1.243760 9.391655 7.912737
+-3.372722 6.932812 7.555215
+6.357561 7.453840 0.779004
+2.587772 6.191203 7.041330
+4.060982 8.791239 6.018459
+5.602012 8.081433 4.042479
+-9.077068 6.740655 -3.733853
+3.981327 9.929923 6.088756
+5.920037 6.508152 3.338944
+-3.276955 9.328079 7.584158
+6.003296 7.780698 3.110387
+6.359511 8.711738 0.850391
+-4.866301 8.356692 6.928038
+-6.916398 7.079579 5.381715
+-0.102307 7.976592 7.866321
+3.979945 7.817220 6.089957
+4.211940 6.575031 5.879302
+5.887451 6.015154 3.421953
+-0.733913 7.878341 7.912507
+-3.125329 6.697193 7.627394
+-6.922420 8.085911 5.375560
+3.286479 7.029660 -4.247965
+3.687861 7.261034 -4.019285
+5.681640 7.883537 3.885107
+1.263660 8.481316 7.584332
+6.347892 5.847123 0.580506
+5.262682 6.908178 4.621396
+-3.782596 7.820711 7.416715
+0.061697 9.240942 7.845876
+5.729288 8.707359 3.785810
+5.166742 7.690181 4.764320
+-0.091667 7.694332 7.865102
+-8.476749 7.434249 -5.269119
+-5.718910 8.261278 6.400428
+-4.201475 7.439952 7.249750
+0.250055 6.496613 7.818010
+6.303937 6.958160 1.841469
+6.358481 7.403499 0.809088
+5.617285 4.820202 -1.956023
+6.224497 6.137625 -0.315755
+-4.868422 6.989113 6.926893
+2.872551 7.663263 6.883287
+0.427736 6.187043 7.787372
+5.170819 6.040786 4.758399
+6.247120 9.079665 2.192629
+3.647424 6.889324 6.362020
+4.301086 5.540704 5.793258
+2.141243 9.051478 7.257823
+3.458850 7.566013 6.502405
+3.955735 7.960201 6.110903
+-4.908849 7.589488 6.904924
+2.083486 7.460633 7.283207
+5.391952 7.996370 4.415779
+6.346483 6.635142 0.559219
+-4.257017 8.559807 7.225604
+-2.037754 6.963493 7.847785
+1.238738 6.348253 -4.807544
+2.192859 7.660561 7.234647
+4.835362 7.332150 5.206514
+0.648422 8.661534 7.743335
+-1.521003 6.165460 7.899103
+6.083312 6.844283 2.862010
+4.735022 8.063465 5.327186
+4.636124 8.031571 -3.263851
+1.410927 7.979688 7.538041
+5.738476 6.918446 3.766177
+-6.375569 6.960824 5.888803
+-1.170237 8.480965 7.914728
+6.320491 8.625886 0.273281
+6.251438 9.217744 -0.181965
+-2.234117 8.170640 7.819382
+4.944896 6.349166 -2.930763
+2.297860 7.184230 7.186040
+6.070834 7.694718 2.903031
+-9.274463 8.414727 -2.952000
+3.217512 6.933662 6.668853
+-6.102736 7.751450 6.113895
+5.772427 6.752888 3.692175
+5.791323 7.481863 3.649942
+5.568795 7.088601 4.105280
+0.577830 7.832262 7.758150
+4.144203 5.926029 -3.698474
+6.129768 6.829884 -0.699540
+-3.297193 8.024220 7.578148
+6.358020 7.620347 0.793416
+3.902236 6.684126 6.156529
+4.224781 7.729601 5.867091
+-1.270074 7.713791 7.911859
+6.359562 7.721199 1.123708
+-2.708990 7.785976 7.730119
+6.349131 8.407377 0.600185
+6.021141 7.252759 -1.045575
+3.147632 7.237756 6.714446
+5.400282 6.420153 4.401961
+6.354156 8.134127 1.283659
+0.803408 6.196823 7.708377
+5.402218 6.184263 4.398739
+-5.233859 9.140946 6.717635
+4.011303 7.469947 6.062546
+-1.962944 7.524212 7.857310
+-9.346823 6.747060 0.568108
+4.266689 7.563927 -3.599749
+6.284477 7.934805 0.008150
+-7.089520 8.879071 5.199737
+6.316611 8.114970 0.240059
+3.048044 7.632666 6.777494
+0.910120 7.493826 7.682340
+-3.917933 8.683322 7.365646
+5.434955 7.994632 -2.265536
+4.233330 8.035306 5.858927
+6.118830 7.008004 2.739644
+0.353716 6.738118 7.800653
+3.585398 7.109843 6.409244
+-0.551278 7.456853 7.904411
+-3.326026 4.393035 7.569487
+2.085628 8.474764 7.282276
+6.105830 9.409802 2.785460
+-6.229617 7.220079 6.011559
+5.247765 8.136057 -2.543826
+-0.574499 7.308740 7.905677
+-0.729957 8.409236 7.912376
+-0.345802 8.516231 7.890206
+2.569365 7.975445 7.050990
+4.631701 7.908545 5.445962
+2.927578 8.951455 6.850827
+6.143835 6.242815 2.647770
+2.083295 7.110173 7.283290
+-3.881560 7.792531 7.379637
+6.024319 9.191448 3.048260
+-6.397327 7.901329 5.870025
+4.647852 8.578470 5.427744
+4.964773 9.060097 -2.907381
+4.124430 7.313521 5.960941
+-8.072332 6.383679 3.917983
+-9.265460 7.468098 -2.994653
+4.167869 9.560358 5.920757
+-6.457934 8.405787 5.817045
+6.245927 8.113575 2.198894
+2.200433 14.164363 7.231206
+6.277532 14.095384 2.019850
+-4.220554 13.524413 7.241510
+-1.318065 12.418892 7.910033
+-8.149477 13.914129 -5.880997
+-3.887267 12.666497 7.377455
+0.148262 12.069997 7.833655
+5.094620 8.701143 4.866950
+3.949762 11.800855 6.116041
+1.511088 12.142680 7.504666
+6.276827 9.878912 2.024190
+6.355193 12.665769 1.260367
+5.017600 11.788278 4.972383
+-7.360885 13.129024 4.891973
+-5.622475 14.109412 6.467306
+-4.158722 13.220162 7.268011
+2.284093 12.306755 7.192525
+-3.644775 10.761219 7.465969
+-7.657254 11.230894 4.519568
+-0.258630 11.401916 7.882539
+-0.350257 14.186523 7.890572
+-9.123218 11.672188 -3.573463
+-2.949955 12.359559 7.673498
+6.174855 12.262373 2.525785
+3.044262 13.392558 6.779844
+2.220515 12.822477 7.222035
+2.195097 11.007434 7.233631
+0.100823 13.814423 7.840475
+5.373599 10.806169 -2.360648
+-1.604513 13.561378 7.893097
+3.867103 12.184161 6.186007
+3.698579 12.144320 6.322273
+4.513979 12.038316 5.575054
+4.227247 14.107116 5.864738
+1.120093 13.679655 7.626336
+6.188756 11.989198 -0.473422
+3.438085 12.280810 6.517294
+6.112719 10.956084 2.761336
+-3.187831 11.986597 7.609954
+3.933784 13.348806 6.129733
+0.182424 12.800039 7.828558
+2.048057 11.698212 7.298492
+3.928296 13.008112 6.134416
+-2.308261 11.020246 7.807371
+-6.045383 12.809784 6.158867
+6.155309 12.219690 -0.605931
+0.240435 10.818987 7.819548
+6.108442 11.364076 2.776353
+4.545351 12.861107 5.541272
+0.235485 12.115360 7.820334
+-8.667115 12.855218 2.810004
+0.284741 13.335377 7.812363
+3.887776 13.376884 -3.887347
+-8.887502 13.046304 -4.306740
+2.136939 12.566704 7.259735
+4.528727 13.555790 5.559227
+-2.438147 11.251261 7.784621
+-8.458652 11.071519 3.242236
+2.806073 13.502271 6.921654
+-7.725708 12.797749 4.427379
+-0.003525 11.421310 7.854430
+6.358981 12.445085 1.148688
+4.585620 13.790860 5.497256
+3.143811 11.802578 6.716906
+-4.133530 12.826963 7.278638
+-2.439599 11.199172 7.784354
+4.217928 12.501718 5.873615
+2.049950 11.928073 7.297681
+2.949844 11.699537 6.837510
+2.456571 11.932754 7.108770
+4.029759 12.196578 6.046263
+-2.190809 11.503567 7.826071
+-6.459364 12.207927 5.815784
+4.536451 11.906068 5.550900
+-3.589441 11.646578 7.484972
+5.546791 10.098034 -2.080909
+-5.405388 13.270638 6.610830
+1.061260 10.977444 7.642673
+6.290275 11.654097 1.938002
+-4.230323 12.889407 7.237269
+6.203905 12.010467 -0.408922
+4.772615 12.709266 5.282614
+-1.431253 11.690071 7.904576
+2.580461 11.803336 7.045174
+-8.594275 10.839684 2.967809
+5.871124 13.221555 3.462370
+2.153124 13.809412 7.252530
+6.359263 11.244083 0.839400
+4.429757 12.805778 -3.458944
+-0.820555 12.311015 7.914864
+5.847632 13.556211 3.519243
+5.314352 13.779052 -2.448780
+1.945985 11.703117 7.341333
+-4.670287 13.355747 7.030485
+5.135028 11.670193 4.809933
+-9.311509 13.428087 -2.765352
+3.566544 11.807070 6.423392
+-3.699740 12.688370 7.446655
+1.232220 11.272387 7.593792
+-9.191700 10.931599 1.304463
+3.955532 10.072039 6.111078
+1.418355 11.334162 7.535618
+-3.643768 12.687898 7.466318
+-7.684714 12.596101 4.482886
+5.833969 13.917750 3.551659
+6.308725 12.920192 1.804926
+-1.940517 13.537663 7.860027
+1.530055 12.863642 7.498171
+5.886935 10.905312 -1.396406
+5.005207 12.428249 -2.859015
+3.903090 10.956713 6.155808
+-6.420539 11.382701 5.849852
+1.216096 13.316237 7.598587
+0.875209 13.006677 7.691036
+-1.997680 13.429089 7.852976
+-1.973448 12.520848 7.856016
+-3.425593 11.960627 7.538689
+6.268910 12.903694 -0.085811
+5.990917 12.857821 3.146061
+1.012205 10.830698 7.655909
+-1.846537 12.420164 7.870717
+6.183338 13.237705 2.490626
+5.448923 14.035248 4.319779
+5.995965 13.767690 3.131592
+6.274832 14.495142 -0.051112
+6.359164 12.723624 0.835211
+-7.593493 11.184679 4.603240
+3.083310 12.681520 6.755422
+0.260013 11.221469 7.816405
+0.343636 10.990475 7.802404
+6.277683 12.464771 2.018922
+-3.066784 12.482497 7.643248
+5.781331 12.400478 3.672372
+-3.621242 13.068836 7.474104
+-1.931696 13.049244 7.861078
+0.837351 12.632811 7.700270
+1.967123 11.270134 7.332606
+-9.394388 14.145462 0.253446
+-2.534120 13.438985 7.766405
+0.227254 11.350399 7.821635
+5.028503 15.122921 4.957706
+-0.948655 14.875576 7.916607
+3.986816 12.556429 6.083979
+2.514664 11.912112 -4.569233
+5.708457 11.516965 3.829728
+1.422349 11.121027 7.534313
+2.736121 12.413041 6.961043
+6.351253 11.739322 1.340829
+3.079889 11.625464 6.757575
+-2.089229 12.031501 7.840816
+-2.193304 12.123252 7.825692
+1.889709 10.868271 7.364203
+-3.206395 11.656659 7.604671
+6.148047 14.577196 2.631759
+-4.757755 14.360336 6.985581
+6.313249 12.424539 0.212440
+-5.792592 9.989841 6.347988
+2.163711 10.312711 7.247792
+6.258981 13.025250 2.128419
+-9.195631 11.647591 1.288903
+2.210246 12.189040 7.226733
+-5.003531 14.103574 6.852340
+5.550904 13.159012 -2.073816
+5.844144 13.173195 3.527564
+-5.941823 11.730335 6.238107
+-1.076183 13.556973 7.916283
+-9.417359 12.726085 -2.060862
+0.601662 13.071128 7.753225
+6.328447 11.973070 1.632760
+6.241337 13.075520 2.222702
+-6.916809 13.412766 5.381296
+5.773889 12.341480 -1.649569
+-1.917718 13.168649 7.862723
+6.261572 11.640070 2.113891
+5.714205 13.660235 3.817691
+4.934298 12.570212 5.081946
+3.293490 11.983398 6.617982
+2.273933 11.513960 7.197289
+3.272921 12.765062 -4.254902
+-3.338229 12.549393 7.565786
+2.291945 12.962360 7.188830
+5.323405 12.042522 4.526801
+6.163162 12.864248 2.572920
+3.642408 13.745150 6.365878
+-3.154212 13.596934 7.619401
+6.331762 11.176867 1.599055
+-7.429717 13.007145 4.809099
+6.133577 13.712728 2.686094
+6.360139 11.659783 1.093091
+3.321973 12.503798 6.598555
+4.630000 12.749431 5.447874
+-0.187077 11.757690 7.875510
+-0.026942 12.323031 -4.706588
+5.898208 13.115250 3.394906
+5.113889 12.352899 4.839911
+4.650267 12.218878 5.425009
+1.829654 10.485933 7.388028
+1.795357 12.294968 7.401369
+6.277540 12.579757 2.019801
+5.888066 12.701606 -1.393705
+-6.543243 12.863081 5.740756
+-2.852873 12.457608 7.697240
+1.664032 12.414204 7.450688
+1.366065 11.593582 7.552490
+4.471176 10.306188 5.620444
+-0.247407 10.363644 7.881480
+-7.280583 12.610463 4.986106
+3.403407 11.526202 6.541915
+-9.476786 12.750985 -0.896430
+3.846335 11.429080 6.203254
+-6.349315 13.675906 5.911294
+1.336542 12.372627 -4.801329
+-6.825960 11.448446 5.472727
+-4.207928 11.107317 -9.714631
+4.081699 12.052876 5.999829
+-5.513027 11.646216 6.540860
+4.861402 13.063875 5.174285
+5.812969 13.018677 3.600589
+4.918862 11.593634 5.101766
+6.360986 14.440337 0.973071
+6.354472 10.587327 0.700211
+5.860518 11.968779 -1.458480
+4.591805 10.905904 5.490430
+3.091216 15.205112 6.750436
+5.750453 10.551481 3.740338
+5.778717 11.294675 -1.639376
+6.193853 11.509911 -0.452065
+2.387238 12.996018 7.143096
+-3.947961 13.152992 7.353947
+1.393332 14.624528 -4.796817
+-5.124594 12.675945 6.782756
+-2.636198 13.273949 7.745711
+3.206701 11.884348 6.675981
+-0.746332 11.923439 7.912903
+0.605017 10.787804 7.752526
+6.131423 13.038070 -0.693648
+6.205244 13.275952 -0.403064
+-5.561020 11.049258 6.508909
+-6.083835 12.775993 6.128803
+-5.487547 12.654022 6.557633
+-9.161911 10.765233 1.418888
+2.692253 12.710407 6.985239
+6.344149 12.979093 0.526139
+3.651573 10.832002 6.358823
+6.172976 12.897683 2.533460
+0.172148 11.975954 7.830108
+-1.892359 11.576016 7.865645
+-5.500278 13.369908 6.549269
+5.658250 12.116016 3.932391
+-5.080908 11.447498 6.808173
+0.519807 11.466808 7.769812
+-6.060278 14.865419 6.147263
+-7.464249 11.110530 4.766732
+4.338877 15.118552 5.755867
+-8.627323 12.997509 2.897199
+4.015015 11.515665 6.059280
+-3.185039 13.168533 7.610744
+6.229678 13.322411 -0.291177
+6.353475 11.503833 1.298020
+3.771560 13.143656 6.264283
+6.169827 12.485710 2.546237
+-2.410118 12.906419 7.789715
+4.796591 12.690493 5.253791
+6.348361 11.880059 0.587843
+-0.195490 10.731857 7.876371
+-1.688917 12.856378 7.886133
+4.789836 12.629618 5.261943
+5.746570 11.622587 3.748745
+6.265486 12.448909 2.091582
+4.930546 12.092473 5.086777
+-6.084301 11.766311 6.128437
+5.140407 13.570652 4.802251
+5.713530 12.540809 3.819107
+3.292885 12.807401 6.618393
+-8.375042 12.816528 3.400541
+-1.849052 14.102131 7.870446
+6.360283 12.820072 0.892379
+-5.694074 13.916474 6.417840
+-1.709123 14.323741 7.884333
+-4.788526 12.469959 6.969475
+0.972192 14.028338 7.666447
+3.011308 13.105401 6.800192
+1.468204 12.717187 7.519144
+-0.128018 11.544309 7.869207
+1.769517 14.310779 7.411293
+3.416378 13.297534 6.532741
+6.091342 12.817288 2.835098
+4.903210 13.718114 5.121715
+1.861891 13.165251 7.375313
+5.894992 11.682738 -1.377079
+-3.290961 13.090343 7.580004
+-2.550072 12.844061 7.763261
+6.358691 12.985470 0.816711
+-6.282860 12.918771 5.967415
+0.582429 11.916590 7.757206
+5.509402 13.611536 4.213812
+4.462385 13.698571 -3.429406
+-2.074580 11.103913 7.842834
+-2.007179 12.690377 7.851764
+3.328435 13.246261 6.594120
+-4.361705 14.180183 7.178776
+5.427762 13.230125 4.355852
+-2.564246 10.412587 7.760439
+0.511257 13.505404 7.771491
+-7.118429 12.853766 5.168306
+-9.475765 13.800358 -0.824356
+-4.653438 12.465317 -9.455134
+-0.052124 12.139902 7.860440
+3.289446 11.502707 6.620725
+6.305397 11.844576 1.830494
+6.074502 11.202684 -0.884608
+-8.161062 11.140775 3.774358
+-5.573957 11.733566 6.500216
+2.046286 12.404400 7.299250
+4.699889 11.882113 -3.199379
+0.346451 13.200982 7.801916
+5.453170 13.565698 4.312478
+-8.778350 11.780831 2.552213
+1.446117 11.222921 7.526491
+5.733850 11.498124 -1.732233
+3.570606 12.396087 -4.090901
+2.283106 13.869582 7.192989
+1.005622 12.366440 7.657658
+-7.628607 11.979518 4.557416
+4.602979 13.059481 5.478051
+-7.374732 11.122810 4.875466
+5.695161 14.157429 -1.809139
+-8.930701 12.672494 2.157866
+4.604108 13.273024 5.476797
+-5.565179 12.244452 6.506119
+0.289461 16.133265 7.811582
+0.548967 11.458822 7.764009
+4.012242 12.936111 6.061721
+5.740129 9.532853 -1.719483
+1.335582 14.451628 7.562134
+-3.387440 10.952416 7.550654
+5.525784 12.191532 4.184337
+6.102361 11.889337 2.797478
+4.274842 12.854650 5.818899
+1.795004 11.690718 7.401505
+3.559171 12.862841 6.428899
+4.015833 12.018083 6.058560
+0.748977 11.787962 7.721038
+6.317957 10.756836 1.729344
+4.428278 12.278813 -3.460272
+4.980967 14.513476 5.021115
+-4.615852 13.423309 7.057780
+-8.992003 12.593540 1.981944
+6.315022 12.590719 1.754185
+1.492497 12.890922 7.510977
+4.255104 12.878121 5.838012
+-3.824468 10.568661 7.401203
+-9.378446 11.659904 -2.363303
+5.784078 15.986412 3.666226
+3.532043 11.874227 6.449036
+4.746011 13.176407 5.314234
+-9.427570 11.192457 -1.965880
+-2.733807 12.664026 7.724643
+1.495107 13.675998 7.510095
+0.897727 11.975868 7.685447
+-9.083527 11.597328 -3.712014
+-0.652447 12.807256 7.909421
+1.479503 11.562107 7.515357
+6.355416 11.297017 1.255073
+-7.650590 12.121900 4.528409
+-5.959013 11.854527 6.225127
+-0.086290 13.258193 7.864480
+-7.606603 11.621420 4.586204
+-7.326432 13.920220 4.932690
+0.262480 10.668410 -4.760242
+-5.978467 12.509034 6.210354
+2.050260 13.495431 7.297548
+-7.207742 12.208868 5.069218
+4.969893 11.880338 5.035675
+5.687324 10.996126 3.873475
+2.761243 11.914754 6.947012
+3.239989 14.571603 6.653946
+6.170931 11.848683 -0.545584
+5.779203 12.955048 -1.638347
+-2.195182 11.747440 7.825406
+-0.472425 13.861354 7.899600
+-4.767957 12.105010 6.980259
+-2.996437 12.406511 7.661683
+-9.261020 13.100410 1.011002
+-9.462484 12.115588 -1.524710
+-3.174954 11.106646 7.613591
+2.506613 13.339085 7.083434
+-2.677340 11.795304 7.736984
+6.219530 13.050186 2.329921
+3.336722 14.179254 6.588418
+4.221149 12.546914 5.870551
+1.913723 12.890158 7.354508
+-1.653337 12.937776 7.889179
+6.347230 12.100841 1.407362
+6.360152 11.963744 0.884033
+1.093849 11.142433 -4.813126
+3.964864 12.416487 6.103027
+0.194810 12.225559 7.826672
+-1.059100 12.965112 7.916445
+6.329662 11.932717 0.358924
+12.117801 7.615474 -2.463974
+12.476514 5.315066 2.802028
+12.283168 7.221501 3.839794
+12.603519 7.435058 0.750364
+12.592326 6.902404 1.582770
+9.170913 7.858727 -7.548165
+7.499041 6.116741 -8.970199
+6.869344 6.419820 -9.381533
+12.605719 6.432333 0.923674
+11.224176 6.819834 6.772856
+12.263786 8.649724 -1.911026
+12.360116 7.944104 3.480738
+12.016205 8.681077 -2.797770
+6.715064 5.769742 11.851487
+3.304638 7.618947 13.498445
+2.652706 9.302665 -10.916271
+12.198309 7.961886 -2.171957
+10.996977 8.512681 7.203566
+4.538645 5.885145 13.034513
+6.432457 7.909771 12.035184
+10.003697 8.758199 8.740944
+4.720601 8.631316 -10.400484
+-3.333133 7.570941 -10.144582
+10.727716 7.270708 -5.569854
+12.597712 8.025809 1.452255
+3.711269 7.638358 -10.704200
+10.560779 9.236596 -5.827907
+10.379283 7.613797 -6.093002
+-6.247664 6.468697 13.173585
+5.640737 7.111267 -10.030591
+4.954559 7.276987 -10.315167
+-2.928065 8.231343 14.040700
+-4.534519 7.544332 13.726638
+9.976473 7.938843 8.777152
+3.717911 10.089548 -10.702532
+10.817113 8.620865 -5.425469
+-3.103877 8.278997 -10.241276
+5.400397 8.898638 -10.136236
+6.120927 9.093006 -9.799335
+7.008441 8.161593 11.649963
+7.700290 6.716768 -8.826038
+12.432928 6.803510 3.082792
+1.224284 7.922068 13.996476
+12.058273 8.341711 4.686799
+9.872558 7.029309 -6.761280
+9.960082 7.203683 -6.652607
+8.156103 7.744110 -8.474580
+9.304862 7.261491 -7.408487
+12.215779 6.742451 4.119245
+12.572268 7.495197 0.077119
+3.889592 5.510177 -10.657932
+2.695033 9.675566 13.679702
+11.825405 7.049338 -3.348740
+12.344662 8.548160 3.557042
+-5.092813 6.342415 13.572089
+7.687894 7.226831 11.137239
+0.545483 8.299589 14.087910
+2.561366 6.515293 -10.929653
+-6.099383 7.351244 -8.382270
+12.200227 7.495353 4.180085
+8.266143 7.785569 -8.384221
+12.533724 7.268476 -0.348547
+12.598801 7.130035 0.573150
+12.297999 7.001475 3.774335
+4.048360 7.360770 -10.614112
+8.872720 6.945690 10.060920
+12.542827 5.578369 2.258338
+12.602721 8.515038 0.712509
+7.286051 7.962592 -9.115883
+6.954644 8.784232 11.687769
+12.448153 7.276414 -0.987164
+12.335934 7.950223 -1.590987
+12.536696 6.363012 2.318113
+11.917202 6.653421 -3.094186
+12.590491 7.132051 0.373562
+12.316961 8.367514 -1.679085
+12.393544 6.882112 -1.301746
+5.559799 7.292366 12.542932
+12.436356 8.214343 3.062120
+-2.073902 8.565516 -10.601441
+3.106069 7.628667 13.560833
+12.334180 7.305677 -1.599262
+3.345721 7.423119 13.485127
+8.986596 6.983190 -7.733120
+-8.287909 6.497748 -5.634545
+12.186905 8.300797 4.231241
+10.485464 4.578501 -5.939766
+8.015121 7.559538 -8.587133
+8.164512 8.288012 -8.467754
+5.044384 7.179834 -10.280860
+11.017901 6.517774 7.165477
+10.077070 7.193753 8.642057
+4.275679 7.833303 -10.547008
+6.048655 8.797256 -9.835909
+11.731815 7.346617 -3.591642
+7.222918 7.879450 -9.157749
+-1.858404 9.084682 -10.662265
+10.485500 7.391798 8.053432
+10.832532 6.598463 7.492996
+12.524820 6.870312 -0.428713
+9.339466 8.506801 9.557868
+10.347612 6.370546 -6.137741
+12.591658 8.151038 1.596983
+11.836186 5.860574 5.355112
+12.510739 8.290920 2.545682
+6.796725 7.501466 -9.425317
+8.180317 7.192581 -8.454888
+10.883362 6.485569 -5.315454
+8.504249 7.530093 -8.180837
+-0.724270 6.081097 -10.905232
+10.230831 7.691686 8.428378
+11.659548 6.699526 5.814559
+9.368641 5.855153 -7.340348
+2.159320 8.411914 13.814369
+12.346853 8.821002 -1.538822
+4.613264 6.143349 13.002049
+7.299669 7.867710 11.438387
+11.171941 7.129693 -4.802376
+9.925467 5.882082 -6.695897
+7.426975 6.194402 11.342114
+11.379003 8.193646 6.454989
+8.207717 9.758660 -8.432475
+7.724381 6.750428 -8.808343
+7.024567 6.584960 11.638554
+10.140969 7.909540 -6.419449
+8.871038 10.295108 -7.845010
+12.400649 7.468600 -1.263387
+10.173426 6.192666 8.509206
+11.593900 8.011367 -3.924181
+9.233049 4.820097 9.677198
+11.352597 8.280581 -4.448172
+10.382578 5.523170 -6.088322
+10.536784 7.003312 -5.863840
+-4.830463 6.965111 -9.343669
+8.075752 7.434647 10.812739
+12.239098 8.815696 -2.012262
+12.167772 8.975361 -2.286049
+6.465675 6.121817 -9.615920
+12.416100 6.175255 -1.177539
+12.328762 7.164798 3.633171
+-14.142083 5.971424 5.875845
+4.564249 7.545279 13.023433
+11.749210 5.381868 5.587850
+11.204405 7.065555 -4.740796
+-6.365646 7.854759 -8.137804
+-2.950412 5.811823 14.037624
+3.898303 7.728798 -10.655592
+11.837867 7.647293 5.350478
+10.154785 6.543691 8.535179
+2.637704 6.611702 -10.918522
+-5.071965 7.750295 -9.183445
+7.608727 6.840312 11.200500
+12.086434 7.716573 -2.570719
+7.833564 6.971144 11.018254
+7.227764 6.702410 -9.154557
+12.353757 6.196330 3.512425
+11.519611 9.276255 -4.092596
+12.561339 7.555327 -0.061327
+9.993982 8.255218 8.753894
+12.602398 6.957977 0.698539
+1.206998 8.206717 -11.038692
+9.068106 7.843693 -7.652334
+-2.944341 8.058730 14.038463
+12.068024 6.732886 -2.631763
+11.919195 8.530890 -3.088458
+12.605272 7.390704 1.125837
+-3.088801 6.941105 14.017777
+-7.149141 6.443641 -7.304602
+8.799339 8.027759 -7.912921
+-2.858662 7.329884 -10.337826
+12.557043 6.477845 -0.110975
+10.352265 7.967540 -6.131195
+8.907776 8.201834 10.024896
+10.261865 6.202061 -6.256736
+12.474163 8.564901 2.818297
+11.393506 6.564949 -4.363787
+9.039207 7.447365 -7.681156
+12.133834 8.010927 4.427035
+-8.714766 7.360652 11.922957
+12.079281 6.114276 4.616584
+-8.204597 8.298765 -5.784814
+1.651712 8.205140 -11.021253
+-1.083541 8.179799 -10.841956
+-0.120898 6.655204 14.145197
+12.601406 7.663968 1.334745
+10.070922 6.900444 8.650417
+6.105740 7.164763 -9.807074
+12.302969 7.309710 -1.742139
+10.524453 9.410295 -5.882197
+4.216486 7.773072 -10.564980
+-1.096785 8.498021 -10.839385
+9.622041 8.455131 -7.058593
+12.513221 8.262786 2.525405
+9.934394 7.663393 8.832613
+9.324376 6.022754 -7.387754
+-2.986244 7.293205 -10.288466
+9.789091 8.265431 -6.862540
+6.036937 7.172428 12.276046
+8.629953 7.134370 10.303165
+3.874619 5.555624 -10.661937
+12.253143 8.583001 -1.955124
+7.038052 8.301483 -9.276981
+11.180540 7.371983 6.858659
+11.662423 6.351946 -3.762420
+12.601848 8.279064 0.676144
+11.619500 6.735740 -3.864502
+9.613063 8.880852 9.237253
+10.533295 8.348035 -5.869041
+12.328526 5.744434 -1.625756
+5.381018 5.523841 -10.144469
+11.774699 11.182868 -3.482248
+11.742660 7.163888 -3.564266
+2.239191 7.885492 -10.970730
+-1.338280 7.890723 14.168280
+-4.848380 6.971962 -9.332111
+3.801503 7.325862 -10.681176
+9.861624 9.074855 8.927122
+6.149170 8.121749 -9.784869
+8.635898 8.199501 10.297380
+11.684111 8.427819 -3.709831
+2.738467 8.709106 -10.903004
+-0.873876 7.096826 -10.880393
+7.062165 6.830113 11.611816
+11.168130 7.174346 6.882779
+12.014050 7.026136 4.830075
+12.599734 7.267940 1.392283
+1.636717 8.173922 -11.022132
+5.878658 6.877892 12.367368
+11.114161 6.372688 6.986270
+4.156975 7.793698 -10.582694
+8.890907 8.117092 -7.825987
+11.038549 9.213902 -5.046824
+9.588704 10.278598 -7.096707
+-1.596210 8.130804 -10.729814
+-7.818001 6.802782 -6.413011
+10.883703 7.123479 -5.314882
+-2.879242 9.409180 -10.329991
+11.520928 8.247060 -4.089671
+-6.559795 7.054917 13.046762
+9.925937 8.487898 -6.695312
+7.325962 8.729254 11.418696
+11.627454 8.656857 5.892801
+9.505715 7.281099 -7.190184
+8.559530 6.920913 10.371162
+12.312234 6.971117 3.709964
+12.457532 8.181591 -0.927779
+4.875251 7.450266 12.883870
+11.337071 6.822165 6.543266
+-15.107192 9.753317 3.405405
+11.051246 7.690311 -5.024123
+11.647473 7.677173 -3.798270
+9.975793 9.289726 8.778052
+5.456009 10.889844 -10.112374
+11.615572 8.522256 5.921407
+8.908507 6.880747 10.024143
+10.890256 6.658959 -5.303852
+5.758559 8.533858 12.434796
+-4.763545 6.643364 13.666152
+2.011565 7.365296 -10.994029
+11.804479 8.417304 5.441542
+11.560636 6.134197 6.051229
+12.587921 6.384390 0.323744
+5.848055 8.313186 -9.934111
+-0.892825 5.809355 14.171987
+6.452808 7.933476 12.022287
+9.352387 7.723934 9.543182
+5.059926 8.119093 12.796556
+0.095258 6.720846 14.130104
+0.076211 7.247495 -11.002237
+8.820244 7.630691 -7.893237
+8.672126 9.678521 -8.030658
+7.863370 7.756448 -8.704382
+4.866293 8.145004 -10.348036
+11.133804 7.431459 -4.873642
+8.916412 8.737880 -7.801439
+4.386719 7.657166 13.098998
+12.598787 6.559614 0.572708
+10.634473 7.181063 7.820486
+-6.966182 7.771359 12.868818
+5.155142 7.331320 -10.237358
+11.914538 8.017058 5.132875
+9.741015 7.672185 -6.919849
+9.346666 8.207235 9.549690
+-0.517815 7.272222 -10.936008
+11.573480 7.901590 6.021224
+6.725729 8.468981 -9.467427
+11.543897 7.778207 -4.038294
+12.019814 6.125213 4.811731
+0.714647 7.412779 14.068272
+11.961952 7.641765 -2.963300
+12.043830 7.602178 4.734247
+11.001840 8.292581 7.194740
+-1.420704 6.634401 -10.771141
+-8.377614 8.070473 -5.465562
+6.837346 6.973116 -9.400915
+10.851725 7.145330 7.460083
+11.839520 8.157787 -3.310723
+-0.045765 6.816529 14.140329
+7.767068 6.716549 -8.776756
+11.143391 7.994854 -4.855834
+4.686888 6.970783 12.969503
+9.536979 5.656365 9.328494
+9.921247 6.053596 -6.701148
+10.043345 8.633219 -6.546758
+12.380600 8.376366 3.375698
+12.424267 8.227657 -1.130708
+8.834452 8.358028 -7.879805
+3.767730 7.730299 -10.689886
+6.663138 5.369636 -9.503986
+1.190215 6.223768 -11.039000
+12.165972 6.949678 4.309940
+1.518233 8.460718 -11.028366
+3.591034 9.063961 -10.733651
+1.079981 6.817149 14.018742
+-7.273814 7.706763 12.724155
+-9.052916 6.963534 11.702736
+8.996027 7.734946 -7.723853
+1.092803 7.368163 -11.040283
+4.363196 4.346755 13.108791
+0.690027 6.923059 -11.036465
+12.587260 8.779056 1.683240
+6.634957 7.657954 11.904573
+11.975506 8.589762 4.950373
+2.110904 7.448938 -10.984442
+11.540994 9.381644 -4.044822
+6.970112 7.780697 11.676939
+12.388652 6.189067 3.333082
+12.605075 6.378969 0.849993
+10.978084 8.772089 -5.153393
+12.585490 6.986055 1.715030
+2.751024 7.671819 13.664314
+12.537121 8.303021 -0.316649
+1.308415 6.876874 13.982782
+10.865184 9.460500 7.436870
+11.687053 7.223537 5.746323
+10.069951 7.900564 -6.512411
+7.175601 8.364393 -9.188740
+11.678670 7.591856 -3.723091
+12.461858 8.089101 -0.899739
+8.520515 7.664004 10.408408
+2.876256 8.835012 13.628983
+11.474377 8.312249 -4.191893
+0.647410 6.436073 -11.035200
+5.091069 9.039446 -10.262686
+-4.827053 7.633425 13.648665
+8.529233 6.425375 -8.158851
+-0.076436 7.897815 -10.988352
+11.858333 7.899197 -3.259440
+12.550631 6.787005 -0.181148
+12.275964 6.198066 -1.859672
+-7.551833 7.304432 -6.792690
+4.947653 6.204706 -10.317768
+11.973761 9.058357 -2.927937
+-5.622987 6.496857 -8.779646
+1.194783 6.917019 14.001153
+1.335675 6.229102 -11.035485
+9.038580 6.474315 -7.681779
+8.394513 8.804027 -8.275935
+8.215303 6.809095 10.689756
+11.509541 8.333502 6.168581
+12.210172 9.292370 -2.126418
+10.330403 8.084809 -6.161870
+12.566178 6.934155 -0.002417
+4.231479 7.393143 13.162688
+12.318176 6.529940 3.682619
+9.836524 9.706331 -6.805275
+6.608478 7.026045 11.921942
+10.630705 7.832623 -5.721568
+10.174660 5.118122 -6.374681
+12.473536 7.750357 -0.821830
+-12.668798 7.491675 8.270793
+-12.681141 7.181448 8.254342
+12.602678 7.825215 0.710613
+8.643159 7.682691 10.290303
+12.517936 6.741289 -0.487679
+12.503627 6.777442 -0.603181
+11.410904 7.516606 -4.327390
+1.230476 6.764797 -11.038219
+-2.530176 7.028375 -10.456400
+4.307273 7.006116 -10.537270
+11.290951 6.929425 6.638435
+0.579190 5.712850 14.084162
+11.450532 6.135391 6.300325
+-2.573520 6.860170 -10.441445
+11.388337 8.839326 -4.374542
+-0.954032 7.822137 -10.866200
+3.058484 7.543379 13.575299
+11.406602 8.985942 -4.336418
+3.806238 7.834165 -10.679946
+10.352962 8.041787 8.252019
+12.594873 6.920176 0.469262
+6.846841 8.138366 11.762367
+11.060773 6.515609 7.086486
+8.213095 8.047046 -8.428060
+9.757162 6.369705 -6.900682
+4.425773 8.537267 -10.499847
+8.500542 6.321640 10.427360
+3.301702 6.389957 13.499391
+5.278767 7.039769 -10.187218
+12.323542 7.817334 -1.648879
+6.816350 13.010634 -9.413556
+2.257864 11.652705 -10.968609
+10.024868 13.372918 -6.570460
+5.268847 12.710487 12.693676
+-7.648990 14.390018 12.535679
+7.442461 11.185614 11.330241
+10.861154 11.536425 -5.352630
+9.757610 11.431015 -6.900149
+12.539733 12.559690 -0.291576
+10.518617 13.486570 -5.890861
+6.102679 14.897688 12.237279
+6.818181 12.739468 -9.412456
+11.082247 12.576736 7.046426
+-4.953760 11.477301 -9.263072
+11.370186 12.719958 -4.412093
+0.291620 12.105859 14.113495
+10.359657 12.809526 -6.120777
+11.829601 12.905074 -3.337479
+10.795345 13.957346 -5.461048
+1.295528 12.272009 -11.036647
+10.503142 12.260357 8.026400
+12.041934 11.407420 -2.716374
+10.557027 11.828163 -5.833544
+-2.013150 12.391000 14.135999
+-2.751254 13.552213 -10.377934
+7.954050 12.971966 10.917240
+-0.547994 14.528935 14.165238
+12.026954 13.502657 -2.764003
+6.831490 11.947686 -9.404447
+-0.905839 13.302637 14.172077
+6.037660 13.315730 -9.841417
+12.539100 11.968838 2.295000
+5.802316 12.411208 -9.955829
+-2.794968 12.167921 14.058275
+12.595325 12.156981 1.514184
+11.348187 10.741401 6.520029
+11.414030 11.153702 -4.320817
+-0.991859 11.282232 14.172373
+12.042261 11.510797 4.739361
+12.541196 12.940338 -0.277318
+12.602103 13.545561 0.686324
+9.928511 11.325702 -6.692107
+12.509411 11.377441 2.556429
+11.241896 11.205101 -4.668596
+12.605887 10.807339 1.030159
+-15.708033 11.798480 -1.996230
+9.942177 12.886460 -6.675051
+8.102531 12.778732 -8.517768
+8.934461 13.118438 9.997292
+10.080695 11.672861 -6.498469
+11.055420 10.979794 -5.016635
+-8.458384 12.854959 -5.306337
+11.016262 12.748333 7.168472
+2.052397 12.626166 -10.990198
+12.070482 11.433381 4.646170
+11.302940 10.815754 6.613883
+10.239128 13.784510 8.416588
+11.707213 11.737680 -3.653036
+6.226668 11.325156 -9.744666
+11.921001 12.866313 5.113951
+6.548397 12.889001 11.961026
+3.072722 12.203996 -10.844769
+-3.092174 12.500619 -10.246044
+4.408490 11.850078 -10.505394
+8.133011 10.726120 -8.493260
+0.166699 12.214470 14.124382
+-9.422538 13.360862 -2.013800
+11.745099 11.950619 -3.558083
+11.270807 11.691360 -4.612097
+9.553701 13.403114 -7.136375
+8.033626 11.611145 -8.572562
+5.512306 11.541071 -10.087862
+4.805144 11.788611 12.916142
+12.032583 10.748820 4.770748
+7.400781 12.176786 -9.038270
+8.638421 14.775748 -8.061276
+8.608536 12.492453 -8.088225
+10.466820 13.145324 8.081896
+12.310793 12.713380 -1.707071
+12.553119 14.450536 2.150992
+9.044662 13.863294 -7.675731
+6.049073 12.224750 -9.835699
+6.308746 12.163116 12.112509
+11.801321 12.980314 -3.412737
+7.796368 12.046487 11.048959
+12.034987 12.044517 4.762978
+7.685738 13.342734 -8.836680
+4.438912 11.306218 13.077087
+12.602825 13.429962 1.276818
+10.299320 11.764462 8.330233
+6.810508 11.533278 -9.417063
+12.097073 11.420490 -2.534911
+12.397625 11.283920 3.284623
+12.594208 12.992940 1.540732
+12.265588 12.302767 -1.903488
+6.218907 13.352670 -9.748726
+9.823865 12.483501 8.975476
+8.821288 11.272393 -7.892252
+8.890174 12.198831 -7.826691
+11.012217 13.875882 -5.093544
+9.175404 12.335714 9.740652
+10.354194 11.896905 8.250208
+9.825917 13.183954 -6.818144
+12.605451 13.080329 1.106919
+10.222061 14.161065 -6.310945
+4.795493 12.229645 -10.373803
+11.312162 12.446728 -4.529985
+9.148875 11.966735 9.769580
+12.572241 10.591872 1.918801
+12.523526 13.636959 -0.439984
+-2.377144 14.246648 14.105221
+1.891963 13.138079 -11.004381
+9.572581 12.484909 -7.115023
+7.203831 11.524118 -9.170290
+1.215021 11.916613 -11.038536
+5.078032 12.699360 12.787813
+2.857864 12.100193 13.634252
+10.627966 11.916266 -5.725780
+-3.223613 12.470251 -10.191562
+2.656617 14.173930 13.690114
+7.722110 13.614281 -8.810016
+7.084823 14.231250 11.595610
+6.639163 12.587786 11.901806
+12.461683 12.443429 -0.900881
+-3.201935 14.196611 -10.200690
+12.571885 13.312754 0.071914
+11.694428 11.472622 5.727836
+-2.865375 14.017477 14.049136
+11.515710 12.461427 -4.101255
+12.349324 12.775042 -1.526859
+9.435628 13.653247 9.447511
+12.489510 9.970717 -0.709334
+10.752938 11.917662 7.627205
+1.509901 10.585991 13.947846
+12.573589 12.797583 0.095310
+6.431959 11.849570 12.035500
+12.364235 11.613482 3.459987
+11.995984 11.839635 -2.860398
+11.561106 11.746274 6.050134
+10.202932 11.574616 -6.336773
+8.668443 13.876106 10.265578
+12.067683 14.660040 -2.632884
+11.776051 12.477936 5.517463
+-5.792090 13.230049 -8.644095
+9.512349 12.348706 9.357678
+5.731143 14.105920 -9.989136
+6.372725 14.020783 -9.666853
+12.481504 11.511436 2.766999
+5.164611 13.948339 12.745556
+6.211802 12.918318 12.171827
+12.361837 13.506064 -1.465344
+5.710258 13.650101 12.461467
+-8.318645 12.802142 12.164631
+8.571070 12.980166 -8.121752
+11.884710 12.428090 -3.186317
+3.929851 13.009531 -10.647056
+10.174938 11.712964 -6.374309
+7.045846 11.371670 -9.272054
+5.486601 12.413141 -10.099098
+0.559375 11.929560 14.086375
+7.449142 12.459329 11.325107
+5.566381 12.872991 -10.063979
+12.286527 13.118911 -1.814320
+2.916571 11.257396 13.617340
+12.374604 12.621340 3.406936
+3.833434 12.283449 13.316062
+-1.231762 11.409338 -10.812203
+12.210339 12.747229 4.140668
+11.374602 13.289511 6.464335
+7.893535 13.758496 -8.681391
+0.792088 14.361978 14.058588
+11.825154 12.368874 5.385400
+-6.611497 13.245930 -7.896008
+1.038591 12.866272 -11.040626
+5.359629 13.396163 12.647587
+12.524565 12.821981 -0.430942
+5.654192 13.249748 12.492109
+12.224656 13.369546 -2.069830
+12.012856 13.307606 4.833863
+1.208399 13.554493 -11.038665
+9.779401 13.035889 -6.874150
+11.213722 12.968358 -4.722963
+6.578263 13.124704 11.941654
+10.379418 13.508596 -6.092810
+12.600783 12.577681 0.636792
+12.496548 12.115808 -0.657283
+8.943514 11.012765 -7.775191
+6.393159 11.448999 12.059948
+3.169164 12.720775 13.541364
+12.460552 14.052397 2.909725
+11.413797 13.785224 6.380418
+10.479274 13.094805 8.062938
+2.585194 11.950007 -10.926237
+5.179030 13.886467 12.738445
+-4.847286 10.088201 -9.332818
+5.492716 12.821335 -10.096432
+9.484899 12.988775 -7.213322
+12.206100 12.534476 4.157254
+12.386123 12.607700 3.346550
+9.470117 12.896677 -7.229680
+1.986997 13.149403 -10.996262
+11.484164 12.064795 6.225717
+9.003560 12.958334 -7.716435
+11.230558 14.423715 6.760175
+11.719316 13.339674 5.664829
+5.206505 12.015858 12.724837
+4.546260 13.496732 -10.460326
+7.824019 12.647525 -8.734144
+12.296771 13.504773 3.779817
+4.297477 11.727517 -10.540300
+8.769399 10.921765 -7.940944
+12.273029 11.992707 -1.872138
+11.440961 11.379585 -4.263766
+10.176717 12.068973 8.504606
+8.463815 12.436811 -8.216154
+11.757262 15.014985 5.566863
+9.375816 12.689328 -7.332615
+5.909809 11.834022 -9.904393
+-0.335466 12.868908 14.156885
+12.243396 12.320796 -1.994903
+12.018828 13.358858 -2.789560
+9.189282 14.075697 -7.529279
+10.763486 14.288849 -5.512625
+7.484766 12.003201 11.297624
+8.313569 13.434181 -8.344581
+6.614144 13.366956 -9.532238
+10.764833 13.001663 -5.510458
+-0.913614 13.054739 -10.873434
+12.426540 10.750134 3.120766
+8.796618 11.406992 -7.915475
+-8.338705 12.009549 12.152801
+9.746059 10.644060 9.073682
+5.245496 12.543541 -10.200878
+-5.216040 11.922802 13.534704
+12.507921 11.303995 -0.569430
+8.999698 11.851172 -7.720239
+7.999887 11.098867 -8.599084
+12.604435 11.896056 0.802713
+12.309317 12.407041 -1.713724
+9.091001 13.103821 9.832103
+12.578598 13.623783 0.167905
+12.605773 12.973943 0.933723
+3.006113 12.329530 -10.857207
+7.456406 13.072589 -8.999918
+10.497436 12.982610 -5.922165
+-0.006259 13.324285 14.137608
+-2.849774 12.717538 -10.341195
+5.122999 11.719644 -10.250120
+10.748490 12.534318 -5.536704
+11.613397 12.555988 -3.878809
+11.009435 10.958159 -5.098451
+1.519280 12.784584 -11.028317
+-11.360942 13.001116 9.789335
+7.702289 12.774521 11.125631
+-4.811628 11.626781 -9.355764
+12.182510 14.877548 -2.231530
+-2.373355 11.002604 14.105589
+11.731531 12.732347 5.633549
+12.315108 11.833380 3.696773
+11.606759 12.487041 5.942500
+12.165032 10.353026 -2.296078
+5.922907 13.682240 -9.898032
+10.132602 14.255898 8.565916
+-9.535315 12.455212 11.364959
+0.694441 13.177736 -11.036587
+12.445946 12.303132 3.003127
+11.377638 13.586940 6.457891
+7.984023 13.285935 10.891737
+12.597578 11.793295 0.538273
+10.676806 12.012207 7.752274
+-6.659653 14.462967 13.004399
+12.605892 13.361458 0.965502
+1.196684 13.353240 -11.038884
+-2.348789 12.394022 -10.516747
+7.623316 13.919811 -8.881942
+10.969680 14.792566 7.252812
+10.985844 9.951648 7.223712
+10.009515 13.349031 8.733173
+10.859816 12.378512 -5.354860
+-1.343427 12.780648 -10.788363
+7.629084 13.132946 11.184326
+12.573159 12.563987 1.906156
+10.228458 13.872663 -6.302276
+8.862402 11.989036 10.071472
+11.042450 11.174950 7.120405
+9.611716 12.667233 -7.070433
+11.435302 10.461704 6.333704
+12.507807 10.900643 2.569299
+12.457351 11.110768 -0.928945
+-6.162348 11.854508 -8.326001
+5.336361 13.401175 -10.163283
+9.354740 11.414215 -7.355292
+6.723762 11.572013 11.845674
+12.591959 13.412603 0.403892
+12.578839 13.500807 1.823640
+8.058742 11.480587 -8.552688
+2.518108 13.533819 -10.935723
+11.317532 13.155457 6.583824
+10.954981 12.570638 7.279126
+8.754584 11.017792 -7.954738
+7.594358 11.167188 -8.902726
+7.799488 11.127066 -8.752565
+11.618796 13.341903 -3.866156
+8.449134 11.947829 10.475784
+12.556698 12.974315 2.111241
+1.479085 11.899375 -11.030147
+12.567905 11.801877 0.019475
+7.194893 11.643279 11.515872
+12.464745 11.370067 2.882043
+11.490820 13.273288 6.210803
+6.042864 12.211185 12.272572
+10.882708 13.492933 -5.316554
+11.987782 11.876503 -2.885477
+11.947972 12.593412 -3.004712
+12.490590 12.422387 -0.701453
+12.157602 11.169810 -2.323104
+12.581520 12.124597 0.213393
+6.023082 11.662650 12.284153
+2.879205 13.263030 -10.879754
+7.738969 12.845647 -8.797583
+9.562994 11.629199 -7.125878
+10.923424 12.459630 -5.247616
+7.238147 12.918732 -9.147704
+0.701508 11.476862 -11.036778
+11.972887 12.732145 -2.930565
+10.041759 12.371146 -6.548798
+12.516962 12.507152 -0.495828
+10.494884 12.155889 -5.925923
+12.458969 12.129027 2.920068
+6.929190 10.921235 -9.344902
+3.280984 13.624737 -10.803188
+12.451864 11.441014 -0.963893
+5.860639 13.207941 12.377587
+5.831496 10.374859 -9.942002
+12.299420 11.651606 -1.757889
+9.875874 10.482600 8.908753
+11.285587 14.196296 -4.582928
+9.302950 13.222287 9.599141
+11.976094 10.925272 -2.920908
+3.225604 11.425352 13.523669
+-6.657874 11.786150 -7.848530
+1.812385 13.490156 -11.010549
+11.438219 12.407394 -4.269611
+11.851548 13.250619 5.312542
+8.254149 12.912385 10.654906
+7.655285 12.787032 11.163414
+4.965419 12.641398 12.841658
+3.861025 12.735867 13.305884
+9.688245 12.490498 9.145440
+12.399469 12.610112 3.274533
+7.573658 13.352368 -8.917501
+10.485309 13.939863 8.053724
+10.550176 12.944262 7.953620
+-1.619197 11.458821 -10.724171
+12.353292 12.051927 -1.507523
+12.352157 12.983628 3.520332
+12.071890 12.920490 4.641454
+4.737487 13.452414 -10.394520
+10.399206 12.404573 -6.064635
+11.392349 12.432645 6.426532
+12.480185 12.728288 2.776325
+11.565229 13.740171 -3.989998
+12.145384 13.470776 -2.367033
+12.477754 13.225845 -0.792832
+-3.348618 11.879689 -10.137822
+12.318879 11.227971 3.679364
+4.231117 11.635667 13.162834
+-5.480287 12.160751 -8.889617
+12.591062 12.795535 0.385167
+12.457524 12.755093 -0.927828
+10.223449 12.351174 8.438844
+9.593354 13.172997 -7.091410
+11.575734 12.112569 -3.966005
+12.152331 11.443093 4.360170
+12.348663 14.256436 -1.530064
+6.847284 11.194604 11.762064
+4.073817 14.546925 13.225130
+10.824423 12.162309 -5.413460
+11.865408 13.033303 5.273727
+10.140920 11.969731 -6.419514
+5.192349 11.128866 12.731858
+-3.407399 12.633173 -10.111893
+8.587361 12.673915 -8.107210
+-15.338441 12.281832 2.524663
+11.303333 13.162835 6.613076
+9.755416 12.628960 9.061971
+8.905650 11.418342 10.027089
+11.578258 12.843210 -3.960219
+10.973124 12.361134 -5.162023
+12.531230 12.972506 2.369204
+12.381398 12.228963 3.371510
+12.186071 12.875782 -2.218209
+-4.358588 12.063279 -9.630134
+8.443310 12.382025 10.481237
+9.553932 11.246692 -7.136115
+8.503806 13.110051 10.424269
+12.406159 13.831786 -1.233169
+12.440832 12.176057 -1.032280
+9.612878 11.898241 -7.069101
+-0.945664 11.501216 -10.867711
\ No newline at end of file