Posted to commits@ignite.apache.org by ch...@apache.org on 2018/08/21 14:49:22 UTC

[1/2] ignite git commit: IGNITE-9336: [ML] ANN/SVM Trainer tests produce unpredictable results due to random data generation

Repository: ignite
Updated Branches:
  refs/heads/master a09b2c057 -> bba6adf82


http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java
index f771dae..d16ae72 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java
@@ -21,10 +21,9 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -32,42 +31,23 @@ import static org.junit.Assert.assertEquals;
 /**
  * Tests for {@link LinearRegressionLSQRTrainer}.
  */
-@RunWith(Parameterized.class)
-public class LinearRegressionLSQRTrainerTest {
-    /** Parameters. */
-    @Parameterized.Parameters(name = "Data divided on {0} partitions")
-    public static Iterable<Integer[]> data() {
-        return Arrays.asList(
-            new Integer[] {1},
-            new Integer[] {2},
-            new Integer[] {3},
-            new Integer[] {5},
-            new Integer[] {7},
-            new Integer[] {100},
-            new Integer[] {1000}
-        );
-    }
-
-    /** Number of partitions. */
-    @Parameterized.Parameter
-    public int parts;
-
+public class LinearRegressionLSQRTrainerTest extends TrainerTest {
     /**
      * Tests {@code fit()} method on a simple small dataset.
      */
     @Test
     public void testSmallDataFit() {
         Map<Integer, double[]> data = new HashMap<>();
-        data.put(0, new double[] {-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107});
-        data.put(1, new double[] {-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867});
-        data.put(2, new double[] {0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728});
-        data.put(3, new double[] {-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991});
-        data.put(4, new double[] {0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611});
-        data.put(5, new double[] {0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197});
-        data.put(6, new double[] {-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012});
-        data.put(7, new double[] {-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889});
-        data.put(8, new double[] {0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949});
-        data.put(9, new double[] {-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583});
+        data.put(0, new double[]{-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107});
+        data.put(1, new double[]{-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867});
+        data.put(2, new double[]{0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728});
+        data.put(3, new double[]{-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991});
+        data.put(4, new double[]{0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611});
+        data.put(5, new double[]{0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197});
+        data.put(6, new double[]{-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012});
+        data.put(7, new double[]{-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889});
+        data.put(8, new double[]{0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949});
+        data.put(9, new double[]{-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583});
 
         LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();
 
@@ -79,7 +59,7 @@ public class LinearRegressionLSQRTrainerTest {
         );
 
         assertArrayEquals(
-            new double[]{72.26948107,  15.95144674,  24.07403921,  66.73038781},
+            new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781},
             mdl.getWeights().getStorage().data(),
             1e-6
         );

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java
index ee38938..349e712 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java
@@ -20,13 +20,12 @@ package org.apache.ignite.ml.regressions.linear;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.apache.ignite.ml.nn.UpdatesStrategy;
 import org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate;
 import org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -34,47 +33,29 @@ import static org.junit.Assert.assertEquals;
 /**
  * Tests for {@link LinearRegressionSGDTrainer}.
  */
-@RunWith(Parameterized.class)
-public class LinearRegressionSGDTrainerTest {
-    /** Parameters. */
-    @Parameterized.Parameters(name = "Data divided on {0} partitions")
-    public static Iterable<Integer[]> data() {
-        return Arrays.asList(
-            new Integer[] {1},
-            new Integer[] {2},
-            new Integer[] {3},
-            new Integer[] {5},
-            new Integer[] {7},
-            new Integer[] {100}
-        );
-    }
-
-    /** Number of partitions. */
-    @Parameterized.Parameter
-    public int parts;
-
+public class LinearRegressionSGDTrainerTest extends TrainerTest {
     /**
      * Tests {@code fit()} method on a simple small dataset.
      */
     @Test
     public void testSmallDataFit() {
         Map<Integer, double[]> data = new HashMap<>();
-        data.put(0, new double[] {-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107});
-        data.put(1, new double[] {-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867});
-        data.put(2, new double[] {0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728});
-        data.put(3, new double[] {-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991});
-        data.put(4, new double[] {0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611});
-        data.put(5, new double[] {0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197});
-        data.put(6, new double[] {-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012});
-        data.put(7, new double[] {-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889});
-        data.put(8, new double[] {0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949});
-        data.put(9, new double[] {-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583});
+        data.put(0, new double[]{-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107});
+        data.put(1, new double[]{-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867});
+        data.put(2, new double[]{0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728});
+        data.put(3, new double[]{-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991});
+        data.put(4, new double[]{0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611});
+        data.put(5, new double[]{0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197});
+        data.put(6, new double[]{-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012});
+        data.put(7, new double[]{-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889});
+        data.put(8, new double[]{0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949});
+        data.put(9, new double[]{-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583});
 
         LinearRegressionSGDTrainer<?> trainer = new LinearRegressionSGDTrainer<>(new UpdatesStrategy<>(
             new RPropUpdateCalculator(),
             RPropParameterUpdate::sumLocal,
             RPropParameterUpdate::avg
-        ), 100000,  10, 100, 123L);
+        ), 100000, 10, 100, 123L);
 
         LinearRegressionModel mdl = trainer.fit(
             data,
@@ -84,7 +65,7 @@ public class LinearRegressionSGDTrainerTest {
         );
 
         assertArrayEquals(
-            new double[] {72.26948107, 15.95144674, 24.07403921, 66.73038781},
+            new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781},
             mdl.getWeights().getStorage().data(),
             1e-1
         );

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java
index e0e6a71..1f8c5d1 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java
@@ -20,52 +20,31 @@ package org.apache.ignite.ml.regressions.logistic;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
 import org.apache.ignite.ml.nn.UpdatesStrategy;
 import org.apache.ignite.ml.optimization.SmoothParametrized;
 import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate;
 import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator;
 import org.apache.ignite.ml.regressions.logistic.multiclass.LogRegressionMultiClassModel;
 import org.apache.ignite.ml.regressions.logistic.multiclass.LogRegressionMultiClassTrainer;
-import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer;
 import org.junit.Assert;
 import org.junit.Test;
 
 /**
- * Tests for {@link SVMLinearBinaryClassificationTrainer}.
+ * Tests for {@link LogRegressionMultiClassTrainer}.
  */
-public class LogRegMultiClassTrainerTest {
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** Precision in test checks. */
-    private static final double PRECISION = 1e-2;
-
+public class LogRegMultiClassTrainerTest extends TrainerTest {
     /**
-     * Test trainer on classification model y = x.
+     * Test trainer on 4 sets grouped around square vertices.
      */
     @Test
     public void testTrainWithTheLinearlySeparableCase() {
-        Map<Integer, double[]> data = new HashMap<>();
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
+        Map<Integer, double[]> cacheMock = new HashMap<>();
 
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = y - x > 0 ? 1 : -1; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        for (int i = 0; i < fourSetsInSquareVertices.length; i++)
+            cacheMock.put(i, fourSetsInSquareVertices[i]);
 
         final UpdatesStrategy<SmoothParametrized, SimpleGDParameterUpdate> stgy = new UpdatesStrategy<>(
             new SimpleGDUpdateCalculator(0.2),
@@ -87,8 +66,8 @@ public class LogRegMultiClassTrainerTest {
         Assert.assertEquals(trainer.updatesStgy(), stgy);
 
         LogRegressionMultiClassModel mdl = trainer.fit(
-            data,
-            10,
+            cacheMock,
+            parts,
             (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 1, v.length)),
             (k, v) -> v[0]
         );
@@ -97,7 +76,9 @@ public class LogRegMultiClassTrainerTest {
         Assert.assertTrue(mdl.toString(true).length() > 0);
         Assert.assertTrue(mdl.toString(false).length() > 0);
 
-        TestUtils.assertEquals(-1, mdl.apply(new DenseVector(new double[]{100, 10})), PRECISION);
-        TestUtils.assertEquals(1, mdl.apply(new DenseVector(new double[]{10, 100})), PRECISION);
+        TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(10, 10)), PRECISION);
+        TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(-10, 10)), PRECISION);
+        TestUtils.assertEquals(2, mdl.apply(VectorUtils.of(-10, -10)), PRECISION);
+        TestUtils.assertEquals(3, mdl.apply(VectorUtils.of(10, -10)), PRECISION);
     }
 }
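
The key change above: instead of drawing labeled points from ThreadLocalRandom on every run, the test now copies rows from a fixed array (fourSetsInSquareVertices) supplied by the shared TrainerTest base class. As a hedged illustration of how such a fixture can be produced once, offline, here is a small stand-alone generator; the class name, seed, cluster size and the label-to-vertex mapping are illustrative assumptions, not part of the patch (the labels the test actually expects can be read from the assertions above).

    import java.util.Locale;
    import java.util.Random;

    /** One-off helper (illustrative): prints {label, x, y} rows clustered near the four square vertices. */
    public class GenerateSquareVertexClusters {
        public static void main(String[] args) {
            Random rnd = new Random(123L); // fixed seed, so the printed literals never change
            double[][] centers = {{10, 10}, {-10, 10}, {-10, -10}, {10, -10}};

            for (int lb = 0; lb < centers.length; lb++) {
                for (int i = 0; i < 25; i++) {
                    double x = centers[lb][0] + rnd.nextGaussian();
                    double y = centers[lb][1] + rnd.nextGaussian();

                    // Paste the printed rows into the test fixture as constants.
                    System.out.println(String.format(Locale.US, "{%d.0, %.6f, %.6f},", lb, x, y));
                }
            }
        }
    }

Pasting pre-generated literals into the test keeps the data realistic while ensuring every CI run sees exactly the same input, which is the point of IGNITE-9336.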

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java
index 9dd35ef..5bd2dbd 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionSGDTrainerTest.java
@@ -20,84 +20,44 @@ package org.apache.ignite.ml.regressions.logistic;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
 import org.apache.ignite.ml.nn.UpdatesStrategy;
 import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate;
 import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator;
 import org.apache.ignite.ml.regressions.logistic.binomial.LogisticRegressionModel;
 import org.apache.ignite.ml.regressions.logistic.binomial.LogisticRegressionSGDTrainer;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 /**
  * Tests for {@link LogisticRegressionSGDTrainer}.
  */
-@RunWith(Parameterized.class)
-public class LogisticRegressionSGDTrainerTest {
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** Precision in test checks. */
-    private static final double PRECISION = 1e-2;
-
-    /** Parameters. */
-    @Parameterized.Parameters(name = "Data divided on {0} partitions")
-    public static Iterable<Integer[]> data() {
-        return Arrays.asList(
-            new Integer[] {1},
-            new Integer[] {2},
-            new Integer[] {3},
-            new Integer[] {5},
-            new Integer[] {7},
-            new Integer[] {100}
-        );
-    }
-
-    /** Number of partitions. */
-    @Parameterized.Parameter
-    public int parts;
-
+public class LogisticRegressionSGDTrainerTest extends TrainerTest {
     /**
      * Test trainer on classification model y = x.
      */
     @Test
     public void trainWithTheLinearlySeparableCase() {
-        Map<Integer, double[]> data = new HashMap<>();
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
+        Map<Integer, double[]> cacheMock = new HashMap<>();
 
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = y - x > 0 ? 1 : 0; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        for (int i = 0; i < twoLinearlySeparableClasses.length; i++)
+            cacheMock.put(i, twoLinearlySeparableClasses[i]);
 
         LogisticRegressionSGDTrainer<?> trainer = new LogisticRegressionSGDTrainer<>(new UpdatesStrategy<>(
             new SimpleGDUpdateCalculator().withLearningRate(0.2),
             SimpleGDParameterUpdate::sumLocal,
             SimpleGDParameterUpdate::avg
-        ), 100000,  10, 100, 123L);
+        ), 100000, 10, 100, 123L);
 
         LogisticRegressionModel mdl = trainer.fit(
-            data,
-            10,
+            cacheMock,
+            parts,
             (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 1, v.length)),
             (k, v) -> v[0]
         );
 
-        TestUtils.assertEquals(0, mdl.apply(new DenseVector(new double[]{100, 10})), PRECISION);
-        TestUtils.assertEquals(1, mdl.apply(new DenseVector(new double[]{10, 100})), PRECISION);
+        TestUtils.assertEquals(0, mdl.apply(VectorUtils.of(100, 10)), PRECISION);
+        TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(10, 100)), PRECISION);
     }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
index ae94dd2..b772177 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
@@ -20,55 +20,35 @@ package org.apache.ignite.ml.svm;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
 import org.junit.Test;
 
 /**
  * Tests for {@link SVMLinearBinaryClassificationTrainer}.
  */
-public class SVMBinaryTrainerTest {
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** Precision in test checks. */
-    private static final double PRECISION = 1e-2;
-
+public class SVMBinaryTrainerTest extends TrainerTest {
     /**
      * Test trainer on classification model y = x.
      */
     @Test
     public void testTrainWithTheLinearlySeparableCase() {
-        Map<Integer, double[]> data = new HashMap<>();
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
+        Map<Integer, double[]> cacheMock = new HashMap<>();
 
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = y - x > 0 ? 1 : -1; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        for (int i = 0; i < twoLinearlySeparableClasses.length; i++)
+            cacheMock.put(i, twoLinearlySeparableClasses[i]);
 
         SVMLinearBinaryClassificationTrainer trainer = new SVMLinearBinaryClassificationTrainer();
 
         SVMLinearBinaryClassificationModel mdl = trainer.fit(
-            data,
-            10,
+            cacheMock,
+            parts,
             (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 1, v.length)),
             (k, v) -> v[0]
         );
 
-        TestUtils.assertEquals(-1, mdl.apply(new DenseVector(new double[]{100, 10})), PRECISION);
-        TestUtils.assertEquals(1, mdl.apply(new DenseVector(new double[]{10, 100})), PRECISION);
+        TestUtils.assertEquals(-1, mdl.apply(VectorUtils.of(100, 10)), PRECISION);
+        TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(10, 100)), PRECISION);
     }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMMultiClassTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMMultiClassTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMMultiClassTrainerTest.java
index b12b266..f2328f8 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMMultiClassTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMMultiClassTrainerTest.java
@@ -20,58 +20,37 @@ package org.apache.ignite.ml.svm;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
 import org.junit.Test;
 
 /**
  * Tests for {@link SVMLinearBinaryClassificationTrainer}.
  */
-public class SVMMultiClassTrainerTest {
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** Precision in test checks. */
-    private static final double PRECISION = 1e-2;
-
+public class SVMMultiClassTrainerTest extends TrainerTest {
     /**
-     * Test trainer on classification model y = x.
+     * Test trainer on 4 sets grouped around square vertices.
      */
     @Test
     public void testTrainWithTheLinearlySeparableCase() {
-        Map<Integer, double[]> data = new HashMap<>();
+        Map<Integer, double[]> cacheMock = new HashMap<>();
 
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
-
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = y - x > 0 ? 1 : -1; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        for (int i = 0; i < twoLinearlySeparableClasses.length; i++)
+            cacheMock.put(i, twoLinearlySeparableClasses[i]);
 
         SVMLinearMultiClassClassificationTrainer trainer = new SVMLinearMultiClassClassificationTrainer()
             .withLambda(0.3)
-            .withAmountOfLocIterations(100)
+            .withAmountOfLocIterations(10)
             .withAmountOfIterations(20);
 
         SVMLinearMultiClassClassificationModel mdl = trainer.fit(
-            data,
-            10,
+            cacheMock,
+            parts,
             (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 1, v.length)),
             (k, v) -> v[0]
         );
-
-        TestUtils.assertEquals(-1, mdl.apply(new DenseVector(new double[]{100, 10})), PRECISION);
-        TestUtils.assertEquals(1, mdl.apply(new DenseVector(new double[]{10, 100})), PRECISION);
+        TestUtils.assertEquals(0, mdl.apply(VectorUtils.of(100, 10)), PRECISION);
+        TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(10, 100)), PRECISION);
     }
 }


[2/2] ignite git commit: IGNITE-9336: [ML] ANN/SVM Trainer tests produce unpredictable results due to random data generation

Posted by ch...@apache.org.
IGNITE-9336: [ML] ANN/SVM Trainer tests produce unpredictable results
due to random data generation

this closes #4585


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/bba6adf8
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/bba6adf8
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/bba6adf8

Branch: refs/heads/master
Commit: bba6adf82c66adf9979cf72df6aece004283ca47
Parents: a09b2c0
Author: Zinoviev Alexey <za...@gmail.com>
Authored: Tue Aug 21 17:49:11 2018 +0300
Committer: Yury Babak <yb...@gridgain.com>
Committed: Tue Aug 21 17:49:11 2018 +0300

----------------------------------------------------------------------
 .../clustering/KMeansClusterizationExample.java |    2 +-
 .../ml/knn/KNNClassificationExample.java        |    2 +-
 .../examples/ml/knn/KNNRegressionExample.java   |    2 +-
 .../LinearRegressionLSQRTrainerExample.java     |    2 +-
 .../LinearRegressionSGDTrainerExample.java      |    2 +-
 .../LogisticRegressionSGDTrainerExample.java    |    2 +-
 .../split/TrainTestDatasetSplitterExample.java  |    2 +-
 .../binary/SVMBinaryClassificationExample.java  |    2 +-
 .../RandomForestClassificationExample.java      |    2 +-
 .../RandomForestRegressionExample.java          |    2 +-
 .../ignite/examples/ml/util/TestCache.java      |    2 +-
 .../ignite/ml/clustering/KMeansTrainerTest.java |   28 +-
 .../apache/ignite/ml/common/TrainerTest.java    | 1161 ++++++++++++++++++
 .../ignite/ml/knn/ANNClassificationTest.java    |   70 +-
 .../linear/LinearRegressionLSQRTrainerTest.java |   46 +-
 .../linear/LinearRegressionSGDTrainerTest.java  |   47 +-
 .../logistic/LogRegMultiClassTrainerTest.java   |   45 +-
 .../LogisticRegressionSGDTrainerTest.java       |   60 +-
 .../ignite/ml/svm/SVMBinaryTrainerTest.java     |   38 +-
 .../ignite/ml/svm/SVMMultiClassTrainerTest.java |   43 +-
 20 files changed, 1266 insertions(+), 294 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
index 5304a70..c310302 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
@@ -56,7 +56,7 @@ public class KMeansClusterizationExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 KMeansClusterizationExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 KMeansTrainer trainer = new KMeansTrainer()
                     .withSeed(7867L);

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNClassificationExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNClassificationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNClassificationExample.java
index 541e70c..77a48ad 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNClassificationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNClassificationExample.java
@@ -57,7 +57,7 @@ public class KNNClassificationExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 KNNClassificationExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 KNNClassificationTrainer trainer = new KNNClassificationTrainer();
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNRegressionExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNRegressionExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNRegressionExample.java
index 3803eea..a28ecc4 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNRegressionExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/knn/KNNRegressionExample.java
@@ -58,7 +58,7 @@ public class KNNRegressionExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 KNNRegressionExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 KNNRegressionTrainer trainer = new KNNRegressionTrainer();
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionLSQRTrainerExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionLSQRTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionLSQRTrainerExample.java
index efababf..085a089 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionLSQRTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionLSQRTrainerExample.java
@@ -111,7 +111,7 @@ public class LinearRegressionLSQRTrainerExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 LinearRegressionLSQRTrainerExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 System.out.println(">>> Create new linear regression trainer object.");
                 LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionSGDTrainerExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionSGDTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionSGDTrainerExample.java
index bc7cd6f..0a1e966 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionSGDTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/LinearRegressionSGDTrainerExample.java
@@ -116,7 +116,7 @@ public class LinearRegressionSGDTrainerExample {
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 LinearRegressionSGDTrainerExample.class.getSimpleName(), () -> {
 
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 System.out.println(">>> Create new linear regression trainer object.");
                 LinearRegressionSGDTrainer<?> trainer = new LinearRegressionSGDTrainer<>(new UpdatesStrategy<>(

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/regression/logistic/binary/LogisticRegressionSGDTrainerExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/regression/logistic/binary/LogisticRegressionSGDTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/regression/logistic/binary/LogisticRegressionSGDTrainerExample.java
index 1fe38f3..99b5677 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/regression/logistic/binary/LogisticRegressionSGDTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/regression/logistic/binary/LogisticRegressionSGDTrainerExample.java
@@ -60,7 +60,7 @@ public class LogisticRegressionSGDTrainerExample {
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 LogisticRegressionSGDTrainerExample.class.getSimpleName(), () -> {
 
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 System.out.println(">>> Create new logistic regression trainer object.");
                 LogisticRegressionSGDTrainer<?> trainer = new LogisticRegressionSGDTrainer<>(new UpdatesStrategy<>(

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/selection/split/TrainTestDatasetSplitterExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/selection/split/TrainTestDatasetSplitterExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/selection/split/TrainTestDatasetSplitterExample.java
index 53bd6b5..fa1c2ca 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/selection/split/TrainTestDatasetSplitterExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/selection/split/TrainTestDatasetSplitterExample.java
@@ -115,7 +115,7 @@ public class TrainTestDatasetSplitterExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 TrainTestDatasetSplitterExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 System.out.println(">>> Create new linear regression trainer object.");
                 LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/svm/binary/SVMBinaryClassificationExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/svm/binary/SVMBinaryClassificationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/svm/binary/SVMBinaryClassificationExample.java
index b923f4f..bd88c20 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/svm/binary/SVMBinaryClassificationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/svm/binary/SVMBinaryClassificationExample.java
@@ -56,7 +56,7 @@ public class SVMBinaryClassificationExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 SVMBinaryClassificationExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 SVMLinearBinaryClassificationTrainer trainer = new SVMLinearBinaryClassificationTrainer();
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestClassificationExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestClassificationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestClassificationExample.java
index 528adc9..09c1e4f 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestClassificationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestClassificationExample.java
@@ -53,7 +53,7 @@ public class RandomForestClassificationExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                     RandomForestClassificationExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 RandomForestClassifierTrainer trainer = new RandomForestClassifierTrainer(13, 4, 101, 0.3, 2, 0);
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestRegressionExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestRegressionExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestRegressionExample.java
index 3cd4cd2..b83d526 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestRegressionExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/randomforest/RandomForestRegressionExample.java
@@ -58,7 +58,7 @@ public class RandomForestRegressionExample {
 
             IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
                 RandomForestRegressionExample.class.getSimpleName(), () -> {
-                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).get(data);
+                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);
 
                 RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(13, 4, 101, 0.3, 2, 0);
                 trainer.setEnvironment(LearningEnvironment.builder()

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/examples/src/main/java/org/apache/ignite/examples/ml/util/TestCache.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/util/TestCache.java b/examples/src/main/java/org/apache/ignite/examples/ml/util/TestCache.java
index 9de20d6..454aa76 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/util/TestCache.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/util/TestCache.java
@@ -43,7 +43,7 @@ public class TestCache {
      * @param data Data to fill the cache with.
      * @return Filled Ignite Cache.
      */
-    public IgniteCache<Integer, double[]> get(double[][] data) {
+    public IgniteCache<Integer, double[]> fillCacheWith(double[][] data) {
         CacheConfiguration<Integer, double[]> cacheConfiguration = new CacheConfiguration<>();
         cacheConfiguration.setName("TEST_" + UUID.randomUUID());
         cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 10));
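
For completeness, a minimal, hedged usage sketch of the renamed helper; the config path and the tiny sample matrix are illustrative, mirroring how the updated examples above call it from inside their IgniteThread lambdas.

    import org.apache.ignite.Ignite;
    import org.apache.ignite.IgniteCache;
    import org.apache.ignite.Ignition;
    import org.apache.ignite.examples.ml.util.TestCache;

    /** Illustrative usage of TestCache#fillCacheWith(double[][]). */
    public class FillCacheWithSketch {
        public static void main(String[] args) {
            try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
                double[][] data = {{0.0, 1.0, 2.0}, {1.0, 3.0, 4.0}};

                // Creates a cache with a generated name and fills it with the given rows.
                IgniteCache<Integer, double[]> dataCache = new TestCache(ignite).fillCacheWith(data);

                System.out.println(">>> Cached " + dataCache.size() + " rows.");
            }
        }
    }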

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java
index 420e4fb..aae5af1 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansTrainerTest.java
@@ -45,12 +45,12 @@ public class KMeansTrainerTest {
     @Test
     public void findOneClusters() {
         Map<Integer, double[]> data = new HashMap<>();
-        data.put(0, new double[] {1.0, 1.0, 1.0});
-        data.put(1, new double[] {1.0, 2.0, 1.0});
-        data.put(2, new double[] {2.0, 1.0, 1.0});
-        data.put(3, new double[] {-1.0, -1.0, 2.0});
-        data.put(4, new double[] {-1.0, -2.0, 2.0});
-        data.put(5, new double[] {-2.0, -1.0, 2.0});
+        data.put(0, new double[]{1.0, 1.0, 1.0});
+        data.put(1, new double[]{1.0, 2.0, 1.0});
+        data.put(2, new double[]{2.0, 1.0, 1.0});
+        data.put(3, new double[]{-1.0, -1.0, 2.0});
+        data.put(4, new double[]{-1.0, -2.0, 2.0});
+        data.put(5, new double[]{-2.0, -1.0, 2.0});
 
         KMeansTrainer trainer = new KMeansTrainer()
             .withDistance(new EuclideanDistance())
@@ -62,15 +62,17 @@ public class KMeansTrainerTest {
         assertEquals(2, trainer.getSeed());
         assertTrue(trainer.getDistance() instanceof EuclideanDistance);
 
-        KMeansModel knnMdl = trainer.withK(1).fit(
-            new LocalDatasetBuilder<>(data, 2),
-            (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 0, v.length - 1)),
-            (k, v) -> v[2]
-        );
+        KMeansModel knnMdl = trainer
+            .withK(1)
+            .fit(
+                new LocalDatasetBuilder<>(data, 2),
+                (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 0, v.length - 1)),
+                (k, v) -> v[2]
+            );
 
-        Vector firstVector = new DenseVector(new double[] {2.0, 2.0});
+        Vector firstVector = new DenseVector(new double[]{2.0, 2.0});
         assertEquals(knnMdl.apply(firstVector), 0.0, PRECISION);
-        Vector secondVector = new DenseVector(new double[] {-2.0, -2.0});
+        Vector secondVector = new DenseVector(new double[]{-2.0, -2.0});
         assertEquals(knnMdl.apply(secondVector), 0.0, PRECISION);
         assertEquals(trainer.getMaxIterations(), 1);
         assertEquals(trainer.getEpsilon(), PRECISION, PRECISION);

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java
new file mode 100644
index 0000000..678ed44
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/common/TrainerTest.java
@@ -0,0 +1,1161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.common;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+/**
+ * Basic fields and methods for the trainer tests.
+ */
+@RunWith(Parameterized.class)
+public class TrainerTest {
+    /** Number of parts to be tested. */
+    private static final int[] partsToBeTested = new int[]{1, 2, 3, 4, 5, 7, 100};
+
+    /** Parameters. */
+    @Parameterized.Parameters(name = "Data divided on {0} partitions")
+    public static Iterable<Integer[]> data() {
+        List<Integer[]> res = new ArrayList<>();
+
+        for (int part : partsToBeTested)
+            res.add(new Integer[]{part});
+
+        return res;
+    }
+
+    /** Number of partitions. */
+    @Parameterized.Parameter
+    public int parts;
+
+    /** Precision in test checks. */
+    protected static final double PRECISION = 1e-2;
+
+    /** Two easily separated clusters of data. */
+    protected static final double[][] twoClusters = {
+        {0, 519.9017766224466, 554.4100892224841},
+        {0, 563.5609233456146, 558.5857619285702},
+        {0, 503.5549215892729, 594.3825404658926},
+        {0, 584.7460223841858, 515.2243614011547},
+        {0, 575.095839624477, 590.8556618187845},
+        {0, 594.3592060102463, 554.2221434279162},
+        {0, 583.432820535236, 504.66164764881523},
+        {0, 599.0963460154512, 534.1774623344388},
+        {0, 568.9703081604248, 543.2226391011388},
+        {0, 586.6698629586531, 529.5241964168969},
+        {0, 551.1051323168858, 539.1885401513679},
+        {0, 508.4609024546371, 504.35073029226396},
+        {0, 599.0470661333914, 569.4595846036917},
+        {0, 570.5493551454197, 526.7253349784085},
+        {0, 534.2832458435303, 550.3000463382016},
+        {0, 594.4616179647461, 536.3197487506842},
+        {0, 565.3197172280577, 506.3293991999001},
+        {0, 592.6602122456759, 513.646808538896},
+        {0, 509.8216048850749, 509.4973240875119},
+        {0, 502.3878128815718, 570.9482197992043},
+        {0, 594.6632085763065, 547.9275009326266},
+        {0, 529.6467177083762, 547.9107158851994},
+        {0, 544.9626346641528, 567.3832919235468},
+        {0, 511.4105135690089, 578.1849565872583},
+        {0, 501.01584549257973, 570.6868576016038},
+        {0, 595.8080144542582, 512.03499265368},
+        {0, 528.786843178995, 502.8166496868458},
+        {0, 528.6621082789842, 560.8712577770658},
+        {0, 510.8974224808237, 596.4667253000505},
+        {0, 583.8947380467763, 547.9688139648637},
+        {0, 561.4766784411281, 531.2449896695659},
+        {0, 560.6943663394893, 566.9710095676068},
+        {0, 517.393777179133, 588.7651419118193},
+        {0, 500.4713974957799, 528.0769354138976},
+        {0, 545.8783916658755, 586.1791106273984},
+        {0, 587.1987551324714, 552.7968581692342},
+        {0, 504.14955324617733, 502.9202365190475},
+        {0, 589.118356537786, 567.5453447798067},
+        {0, 581.0404600079042, 524.3383641814191},
+        {0, 578.836850556919, 519.0303628080188},
+        {0, 532.684541905037, 592.0373074571884},
+        {0, 539.631541540315, 500.86701934899133},
+        {0, 585.080559785121, 559.185605736917},
+        {0, 557.6130747490417, 586.9060188494332},
+        {0, 511.4069711786483, 505.20182772247955},
+        {0, 543.3420695017039, 589.0522243776551},
+        {0, 545.7836567392021, 545.9829264066165},
+        {0, 587.4404520697882, 566.2450515524025},
+        {0, 598.0352806197182, 592.9871556855218},
+        {0, 599.1191676869415, 517.072913155282},
+        {0, 598.7990121325806, 542.5922389368699},
+        {0, 567.9157541778169, 508.8637304888606},
+        {0, 516.9141893487038, 504.5333015373364},
+        {0, 528.2650000284832, 592.3618290091457},
+        {0, 577.0877824827497, 572.106440915086},
+        {0, 569.5034479656674, 513.1883531774486},
+        {0, 587.7126777761002, 568.9323649263932},
+        {0, 565.9489368582279, 516.9745616328178},
+        {0, 557.5589060305804, 515.2687667913198},
+        {0, 503.1554198985989, 509.09477188561954},
+        {0, 550.0203572858189, 595.1223421437577},
+        {0, 524.7913631016987, 523.3640528148924},
+        {0, 552.7246513026029, 546.2810129784725},
+        {0, 586.3892191983499, 552.7576239548819},
+        {0, 526.0748315118926, 573.804342015302},
+        {0, 565.1398123093003, 539.6854465576956},
+        {0, 527.0537447563926, 595.2059572407275},
+        {0, 598.4431244531863, 518.7675712573573},
+        {0, 518.1347648644486, 571.2772685572616},
+        {0, 522.0665003535328, 597.4691949058798},
+        {0, 559.3717433904218, 507.63523020707987},
+        {0, 517.7519710704423, 595.9228343205995},
+        {0, 557.1028047052068, 513.67799332853},
+        {0, 527.9783249961056, 596.5923404246605},
+        {0, 508.9548667053109, 583.3851484560171},
+        {0, 597.3054599709918, 572.0492942719156},
+        {0, 506.48170301986886, 545.2749213691201},
+        {0, 569.5215580939445, 552.2362437646713},
+        {0, 530.5232047696994, 517.814585379635},
+        {0, 582.7447646378554, 554.0837636670908},
+        {0, 510.04656659835496, 548.308864572033},
+        {0, 517.0884034675382, 503.6293035255885},
+        {0, 547.4077952612713, 521.8105170207767},
+        {0, 525.2452470246204, 565.7690087891091},
+        {0, 525.726872006642, 592.172865284197},
+        {0, 598.311246268818, 506.29428096115674},
+        {0, 599.4974643204109, 579.8062124124598},
+        {0, 584.7506624741848, 592.2505541944379},
+        {0, 598.7379007956142, 561.8346831647877},
+        {0, 553.9325403298083, 540.4895037718127},
+        {0, 577.4868596401562, 533.9482256583582},
+        {0, 524.7729276101758, 523.3563039535018},
+        {0, 513.6033305233657, 572.2592770048955},
+        {0, 574.5120210087475, 557.5521505158835},
+        {0, 573.951281294893, 527.3670057739082},
+        {0, 548.1326423460839, 551.1839666791825},
+        {0, 508.2214563147455, 521.2342805765958},
+        {0, 515.93448815859, 511.17271820377954},
+        {0, 586.8712784936447, 571.3833808148395},
+        {0, 557.5242762492126, 527.4051948485309},
+        {1, -527.9820655500421, -597.0614987497938},
+        {1, -594.7423576008234, -570.0387215442279},
+        {1, -545.604557338824, -554.0763169557739},
+        {1, -502.35172702595014, -586.8484342087179},
+        {1, -587.293337705269, -588.0796352216714},
+        {1, -587.0516505340747, -517.7300179102016},
+        {1, -597.0360062250987, -547.9934802704281},
+        {1, -540.578489505472, -519.8075273206096},
+        {1, -530.4922286462058, -523.234050745461},
+        {1, -570.1324748254381, -584.3427934817109},
+        {1, -508.71765087148526, -521.2260165247377},
+        {1, -506.10153233039114, -546.0469706912013},
+        {1, -587.6311232069863, -500.8789962962048},
+        {1, -585.9407497123008, -593.6250426349442},
+        {1, -597.7192354774427, -504.3968636076061},
+        {1, -587.6912279656732, -587.810549281485},
+        {1, -567.4906024676383, -529.7889328775241},
+        {1, -510.5883782383144, -564.6056218025714},
+        {1, -545.5877634339324, -503.13342363625316},
+        {1, -595.491952236763, -526.4157102337199},
+        {1, -565.8931103880244, -512.3930396698607},
+        {1, -564.9817304867518, -518.5421568025347},
+        {1, -528.5838433236987, -590.2716385768655},
+        {1, -568.3038165320794, -523.2037657971182},
+        {1, -513.579781599134, -540.7083264768794},
+        {1, -577.5234177434545, -574.4083212880694},
+        {1, -566.4331360533965, -529.8498325039095},
+        {1, -517.1862636590681, -544.9513758919965},
+        {1, -534.6578726508548, -515.7113551681354},
+        {1, -531.5918919225953, -508.0051177928042},
+        {1, -521.335920134657, -549.8508399779365},
+        {1, -587.6565547672371, -500.40617781899505},
+        {1, -502.89297655657947, -550.0462820641452},
+        {1, -565.9529549834383, -570.5296426883887},
+        {1, -539.695184660248, -566.3720803092855},
+        {1, -557.2412994794262, -516.6673702747074},
+        {1, -548.3193140374153, -511.0113251963232},
+        {1, -599.1568790407902, -559.2622714664305},
+        {1, -571.755520275542, -554.0839358749181},
+        {1, -544.964945135059, -564.448243523719},
+        {1, -574.7985361688525, -593.6384131471896},
+        {1, -563.642288502551, -538.3721218790038},
+        {1, -500.4279098845297, -583.9340798923859},
+        {1, -569.917708080877, -550.7162526230916},
+        {1, -549.8345448125123, -565.7759787232027},
+        {1, -527.8248193430064, -562.9256751876678},
+        {1, -508.69265110570973, -544.8174395269017},
+        {1, -561.7662650395065, -534.6799220439667},
+        {1, -510.11351976460816, -567.17615864117},
+        {1, -592.6464340868883, -546.7679954740394},
+        {1, -591.5566687475105, -516.335391669214},
+        {1, -598.1620280980214, -563.5663494736577},
+        {1, -571.6540085024682, -514.5024112396218},
+        {1, -597.0973739353884, -518.6402453320493},
+        {1, -597.971879649216, -541.9911785849602},
+        {1, -502.7804400334985, -527.9041465965335},
+        {1, -502.24013032418287, -596.8646708140396},
+        {1, -598.4180305891012, -535.013864017069},
+        {1, -575.018281589379, -596.0252991207353},
+        {1, -593.1939727679464, -557.8288153478848},
+        {1, -552.9384213856413, -579.3694486320592},
+        {1, -559.9203621818546, -554.0072497905501},
+        {1, -588.3411365623961, -575.9606196770269},
+        {1, -517.0844394937534, -547.9291196136605},
+        {1, -509.32764537741576, -591.737729755405},
+        {1, -557.2674260753181, -543.5864572972603},
+        {1, -565.1475139126333, -559.4796022645727},
+        {1, -556.0025119789701, -572.6261174533101},
+        {1, -590.7960121205607, -517.0840963139137},
+        {1, -580.3696729031607, -541.5331163469414},
+        {1, -519.8369954073894, -599.1883519701099},
+        {1, -590.5570159829517, -587.4602437344656},
+        {1, -502.5275914906194, -540.3454217852702},
+        {1, -584.1282872304774, -593.2194019651928},
+        {1, -557.8446121942737, -558.0626917521755},
+        {1, -580.209165096907, -588.7259851212183},
+        {1, -510.90874302504056, -591.5091481352281},
+        {1, -514.1724729381817, -595.1020401318071},
+        {1, -552.5076612804402, -548.397966879673},
+        {1, -565.2070083573942, -536.1826380211752},
+        {1, -565.9469212749985, -561.5506672108052},
+        {1, -526.4398083538586, -507.1913169678737},
+        {1, -595.2594496218172, -594.98464576562},
+        {1, -530.6904491548875, -519.0678635750138},
+        {1, -547.9945700155467, -597.6557660417575},
+        {1, -554.9468747569997, -591.1678311453294},
+        {1, -593.9678599910096, -518.9397714406934},
+        {1, -580.6827396967085, -541.1770564720399},
+        {1, -526.2991394747967, -595.5353558464069},
+        {1, -532.0567052472832, -547.7555982808492},
+        {1, -506.550640897891, -501.44148884553215},
+        {1, -537.7945174903881, -539.9517392521116},
+        {1, -588.1139279080066, -572.5589261656883},
+        {1, -598.4030676856231, -528.8036722121387},
+        {1, -532.6970859002654, -567.13898500018},
+        {1, -564.8245220213231, -595.6981311004888},
+        {1, -568.8669962693484, -516.5125158739406},
+        {1, -549.1709908638323, -558.8129291840139},
+        {1, -510.85336064345756, -575.3635308154353},
+        {1, -583.9245510800588, -536.793806117792}
+    };
+
+    /** The data is easily separated by the classifier y = x. */
+    protected static final double[][] twoLinearlySeparableClasses = {
+        {0.0, -122.69914721554494, -152.90003228835155},
+        {1.0, -988.7803093110984, 39.64498230320555},
+        {1.0, -721.0342526056645, -167.29469954420483},
+        {1.0, 606.0603250738964, 612.4505657575703},
+        {1.0, -435.7428098964267, 749.26660250907},
+        {0.0, 977.0266542119459, 906.2797731011997},
+        {0.0, 442.79191352401017, 99.68443783203702},
+        {1.0, -984.4696576079481, 98.58983213854299},
+        {0.0, 950.3560064579242, -54.087172588871226},
+        {0.0, 989.1247453182418, -942.9228555672748},
+        {1.0, -950.3830359669219, 720.9427578590175},
+        {0.0, -263.7437828854337, -369.67762228969286},
+        {1.0, -837.771820186008, 966.2671117206883},
+        {1.0, -101.63051923258354, 135.30595977925213},
+        {0.0, 927.4068611376827, 552.576689560276},
+        {1.0, 671.674613544031, 867.0342619845135},
+        {0.0, 489.04809639359723, -371.80622025525497},
+        {1.0, -577.8620591314951, -561.9793202960524},
+        {1.0, -628.699903999805, 746.9179933415019},
+        {0.0, 787.7955413710754, 729.8880998762927},
+        {1.0, -160.9905826731191, 597.1342309929371},
+        {1.0, -661.7582546189365, 294.3559610458383},
+        {0.0, 992.067372280372, -586.7840785767917},
+        {0.0, -229.6963941046797, -860.6481903559245},
+        {1.0, -459.91823406828814, 174.31002243199828},
+        {0.0, 132.09417954527203, -203.6015836943012},
+        {0.0, 458.8315635996389, -109.92869423399452},
+        {1.0, 424.63154498678796, 581.7436424491116},
+        {0.0, 606.7777384705123, 382.51034075942744},
+        {1.0, 133.97732363544492, 810.4293150045719},
+        {1.0, -752.3792672455503, 902.3533215842801},
+        {0.0, 124.02578589031486, -242.0045741962906},
+        {0.0, 65.95100120357665, -362.9563512717997},
+        {1.0, -975.7825688109236, -724.6782664271469},
+        {1.0, -885.3333915784285, -166.8285153252507},
+        {1.0, -242.89869955409756, 878.9999767933075},
+        {0.0, 271.2149993049329, -490.0480096390996},
+        {0.0, -74.16302081043352, -824.0859586265949},
+        {1.0, -520.4108075793048, 751.6954919374432},
+        {0.0, 104.03293413801771, -631.0663974778311},
+        {0.0, 179.4274025610996, -610.9764997543232},
+        {1.0, 291.2686412591502, 892.1178988173092},
+        {0.0, 723.1240938478552, -291.3765504086348},
+        {0.0, 12.575218418479949, -307.36975804125973},
+        {1.0, -397.44825972130786, -295.76021536144117},
+        {1.0, -163.90291786947955, 501.6868597449188},
+        {0.0, 513.9232732684154, -287.4072243396091},
+        {1.0, 146.81987289015547, 293.1152654799746},
+        {1.0, -422.734205503476, 154.09536939552663},
+        {0.0, 293.2607563043757, -141.65822134246525},
+        {1.0, -93.46771747630169, 73.91086927080437},
+        {1.0, -972.6525030120272, -867.0819061818511},
+        {1.0, -636.136018043414, 55.4840372628596},
+        {1.0, -821.240801777343, -750.3407912999469},
+        {0.0, 826.9598934792543, -48.17510971836464},
+        {0.0, -737.5399357047692, -834.168742619978},
+        {0.0, 910.2286110591372, -321.2153303241547},
+        {1.0, -539.8385115026349, -204.624635929521},
+        {0.0, 710.9811829617875, 156.53494004963864},
+        {1.0, -576.1327147891295, -255.98030417689222},
+        {0.0, -406.9117225223731, -568.1674835571359},
+        {1.0, 786.4324782672932, 879.9433045727255},
+        {0.0, 655.1507253229393, -931.0320133380443},
+        {1.0, 920.1359556509667, 975.4010808044634},
+        {0.0, 340.9923780361835, -791.6415124130187},
+        {0.0, 789.0326432258107, 101.45600150894029},
+        {0.0, 301.62354598942807, -263.0383267796972},
+        {0.0, -196.75683699829483, -759.6731432356696},
+        {1.0, 104.36756752228234, 362.6645930627608},
+        {0.0, -110.09892045131369, -522.6327938767872},
+        {0.0, 983.058982063912, -853.6685099856713},
+        {0.0, 853.0396544144112, -373.6430440893963},
+        {0.0, 894.5396176478532, -259.3520478430646},
+        {0.0, -59.540445910742505, -405.2785421154832},
+        {1.0, -195.02204474289272, -98.01099074578019},
+        {1.0, -400.33845881394757, 517.4826371806812},
+        {0.0, 998.8721163227847, 658.7589886248159},
+        {1.0, -739.9839264739526, 281.7808456690698},
+        {0.0, 225.2955438875149, -240.13571797647785},
+        {0.0, 415.36363610958847, 119.2467848060553},
+        {1.0, -430.93611072673775, 953.9339020518189},
+        {0.0, 695.641934652828, -613.3163270715312},
+        {1.0, -977.0662561296275, 44.1566618295617},
+        {0.0, 894.0074404584143, 115.97551230630302},
+        {1.0, -256.65810543256225, 121.31432413171797},
+        {1.0, -745.2570475473517, 144.83266177886867},
+        {0.0, 865.266441371979, -329.08860770412593},
+        {1.0, -262.69924145366974, 196.52256942501003},
+        {0.0, 858.8703536921596, -755.3718265129426},
+        {1.0, -620.7574721811682, 744.695289706485},
+        {1.0, 526.9918067706062, 622.6110941283573},
+        {1.0, 30.51838905352247, 451.84360857486945},
+        {1.0, -886.670070825786, 955.5438997547349},
+        {0.0, -419.85446648529296, -904.4363933507589},
+        {1.0, -19.357361515996104, 288.3545217146416},
+        {1.0, 425.807567480902, 617.3859577708511},
+        {1.0, -369.8197242330872, 428.4625522196195},
+        {1.0, -540.2030619980012, 980.1078500916262},
+        {0.0, 963.0216885940265, -999.6718455904652},
+        {0.0, -36.084390168692494, -930.2210871204579},
+        {0.0, 686.7777019875359, 274.083830555807},
+        {1.0, -798.5755214306325, -292.6360310433025},
+        {0.0, -302.49374189510456, -979.2873514693756},
+        {1.0, -473.88156240514184, 290.3700442022921},
+        {1.0, -619.3422333592813, -203.62900604757556},
+        {1.0, -603.8165620304862, 433.7049783716991},
+        {0.0, -394.9003601369652, -423.49571094476414},
+        {0.0, -297.5499912778255, -379.6966117627778},
+        {0.0, 914.6350307682171, 395.0639307730339},
+        {1.0, 302.2432544019764, 420.03068857885364},
+        {1.0, -486.2192439106092, 504.61160963291354},
+        {0.0, -80.9055582464382, -999.3540019713568},
+        {1.0, -808.7735610468485, -600.3003616235419},
+        {1.0, 559.7216432827174, 573.1410775962665},
+        {0.0, 107.25054731907449, 56.68399536280276},
+        {1.0, -986.8173329580039, 955.5975873551458},
+        {0.0, -28.898975148538057, -764.5914349235939},
+        {0.0, 544.5435587517745, 541.7144224905855},
+        {1.0, -733.3388961452514, 995.0625378143936},
+        {0.0, -424.0376248679678, -808.8197992783022},
+        {0.0, 69.10888994619336, -596.3814493832142},
+        {0.0, 668.7563898645246, -309.5338641095864},
+        {1.0, -664.6829023895461, -421.3131122742957},
+        {0.0, 34.30209430645755, -10.50945210920679},
+        {0.0, -370.6335997213754, -510.2102646234516},
+        {1.0, 430.4223842649294, 947.0324231650752},
+        {1.0, -561.4417521638584, 912.0398180862007},
+        {0.0, -529.1099093762112, -787.9426065835444},
+        {0.0, -784.2287272477402, -950.6749150482902},
+        {1.0, -292.2382923363127, 29.73057963193787},
+        {1.0, 543.8216641288004, 574.9668960406921},
+        {0.0, 492.70797586385834, -508.7411915523603},
+        {0.0, 847.4958582226334, 141.27775112134555},
+        {0.0, -294.9950818964355, -539.6512583592041},
+        {1.0, -731.3440778046363, -194.13179207217638},
+        {0.0, -26.21276485761848, -177.1382736912766},
+        {0.0, 169.10051967522577, -877.8835027096119},
+        {0.0, 869.7338657560076, -216.14439990877327},
+        {0.0, 676.9668800100419, 487.3264255975398},
+        {0.0, 340.2086777131092, -483.69798685778176},
+        {0.0, 177.05787101614578, -187.8731928010908},
+        {0.0, 514.0064634256835, -838.309309799528},
+        {1.0, -945.6616134661633, -892.0662652148447},
+        {0.0, 706.7531607568874, 584.875678987067},
+        {0.0, 996.1691889712217, -381.420741757301},
+        {0.0, 846.3827047328193, 138.5937078747695},
+        {1.0, -579.1773394655615, -551.6157981896823},
+        {1.0, -379.8315393213704, 376.240073123181},
+        {0.0, 416.70241675343345, -762.0460887999392},
+        {0.0, 784.4659593773504, -476.3450292459248},
+        {0.0, -328.2495971471759, -797.0282102006712},
+        {1.0, 427.63385513313506, 691.0529822653089},
+        {0.0, 478.22491887051683, 368.08172770775104},
+        {0.0, -194.5486491952804, -635.7562271928532},
+        {1.0, 462.9118544444739, 546.477694721709},
+        {1.0, -364.33646342640543, -16.525517700831642},
+        {1.0, 191.5538518885253, 534.4886561736935},
+        {1.0, 162.29801970257063, 204.07339353277848},
+        {1.0, 359.87375962515307, 510.4390321509045},
+        {0.0, 906.0920707478278, 518.474366833321},
+        {0.0, -23.926514764001354, -545.5535138792807},
+        {1.0, -457.5490330216003, 462.75697632384026},
+        {1.0, 361.19368061986074, 602.0833438729098},
+        {1.0, 240.82404813916537, 903.8580437547587},
+        {0.0, 682.9887385477937, -575.5748494609797},
+        {0.0, -524.9683035626636, -643.4995281011295},
+        {1.0, -868.3907344133812, 687.0334981662659},
+        {0.0, 483.1046447412375, 425.5242965675352},
+        {0.0, 441.7390582141493, -178.6473657093535},
+        {0.0, 857.9901628015248, -725.079106653412},
+        {1.0, 3.9407370946466926, 501.36916187999213},
+        {0.0, 987.6165576421165, -870.7792926909152},
+        {0.0, 38.550394080002434, -316.2460756905849},
+        {1.0, 259.98559430828277, 779.1704474238529},
+        {1.0, -772.0783930084303, 457.81379891960387},
+        {0.0, 965.2460667816263, -900.5906154928432},
+        {0.0, 435.8488975524808, -807.3179393158829},
+        {1.0, -414.9097308847265, 663.2091519493613},
+        {0.0, -692.3369071358595, -853.7674486529854},
+        {1.0, -527.6968945977544, -89.29268231562753},
+        {0.0, 98.58509375449921, -812.2575242800065},
+        {1.0, -246.4858612821199, 690.7736181778389},
+        {0.0, 306.0413673433336, 50.36342267895475},
+        {0.0, -326.3755954952927, -630.9271581822045},
+        {0.0, 435.3759701541835, -478.72141764190417},
+        {0.0, 150.07627192243012, -126.16495181072969},
+        {0.0, 999.2382522208045, 293.8336213483592},
+        {1.0, -970.7818229850416, 559.8116781984274},
+        {0.0, 321.62133209742956, -446.07065722044115},
+        {1.0, 387.61470906465297, 809.9877801153038},
+        {1.0, 375.48380231362376, 548.1340438996276},
+        {0.0, 198.31962497327982, -841.3407638914643},
+        {0.0, -59.75027524961797, -196.91881794207666},
+        {0.0, 539.4390329297466, 265.73233936446013},
+        {1.0, 161.7769611006779, 420.4911194344545},
+        {1.0, -422.73262266569805, 305.27632230640575},
+        {0.0, 419.7041783295376, 384.4277361814418},
+        {1.0, -384.80122335064925, 128.84723939702212},
+        {0.0, 345.8732451410485, -634.6766931661393},
+        {1.0, -753.0957875425104, 162.043321600848},
+        {1.0, -721.0825943433963, -647.1437151757809},
+        {0.0, 737.8179495142201, -612.9000146979762},
+        {0.0, 165.62609685662937, -209.04556534374638},
+        {1.0, 211.75025757991534, 762.4363190775396},
+        {0.0, -282.0707259050812, -631.5669067165459},
+        {0.0, -10.649387489441551, -11.742073063187377},
+        {0.0, 532.2273317939553, -714.4637938741703},
+        {0.0, 851.6255007653094, -428.168617931829},
+        {0.0, -650.2303513768155, -701.0819971407498},
+        {0.0, 486.19072881419584, 17.642342348021202},
+        {0.0, 937.5878660613639, 253.91073899684488},
+        {1.0, -481.7837261941776, 386.0515070365086},
+        {1.0, 898.8591491398315, 960.3282479515362},
+        {1.0, -795.2119099095994, -52.442255260638944},
+        {1.0, -832.14760576095, 406.48368080778823},
+        {1.0, 317.3610961002403, 475.88090137988934},
+        {1.0, -543.9941239514503, 937.9571974443777},
+        {1.0, -737.7149868841586, 412.02870959820666},
+        {1.0, -86.04799530647608, 764.2717139104996},
+        {1.0, -908.3441434769735, -52.62148904481751},
+        {1.0, -558.4878652128368, 975.5017115797407},
+        {1.0, -120.28961819893993, 58.60059810912276},
+        {0.0, 797.7665926374921, -530.0884822652556},
+        {0.0, -248.62486746176887, -983.5555931167586},
+        {0.0, 910.1931415438364, 35.953135142478914},
+        {1.0, -304.741023136228, 253.0138864886694},
+        {0.0, -510.13133519018925, -642.3600729680307},
+        {0.0, 988.5683650098642, -751.2030447890847},
+        {1.0, -118.0142080751416, 352.20209758019996},
+        {0.0, -638.757222741898, -685.6631975937353},
+        {0.0, 759.5622347453971, -722.2769348273996},
+        {0.0, -740.3498419247273, -974.2677201928796},
+        {0.0, -776.6102763008262, -993.7697826767383},
+        {1.0, -895.9448277148887, -462.29125820523006},
+        {0.0, -311.8810163384071, -318.9742942085709},
+        {0.0, 368.78035230644787, -273.65009131252566},
+        {0.0, 731.1488644867686, -184.2725009666142},
+        {1.0, 240.0262332913362, 544.8792933528591},
+        {1.0, -129.8786600652611, 122.64122390591797},
+        {1.0, -998.8693504661202, -989.3959455521401},
+        {0.0, 358.9021702584721, -372.46195332982563},
+        {0.0, 423.66170839399, -3.6733713507491075},
+        {0.0, 320.08527272511014, -267.49487239617406},
+        {1.0, 628.8557340365153, 716.1736088420723},
+        {1.0, 87.0852622927755, 191.08205494997515},
+        {0.0, -163.5535634273158, -401.43333064263857},
+        {1.0, 241.57291015127043, 354.07473809573935},
+        {0.0, 425.42982178930424, -659.6389818980119},
+        {1.0, -513.057622632338, -150.48805414197307},
+        {0.0, 435.2888705572377, -500.4699931158425},
+        {1.0, -761.2341202466506, 919.1637075257438},
+        {1.0, -254.8539665845866, 711.5522826694619},
+        {0.0, -350.2587997576785, -911.7842377194485},
+        {0.0, 588.5547568621123, -16.003674634160916},
+        {0.0, -557.7880688291352, -939.7740734026603},
+        {0.0, 683.6988697659988, -285.8831419034458},
+        {0.0, 782.8461154585116, 426.91516285206694},
+        {1.0, -792.3388875152918, 361.1342300030676},
+        {1.0, -673.792921360787, 820.8934158286147},
+        {1.0, -15.357504282120772, 15.275909249335541},
+        {0.0, -99.22050275699814, -249.077767711845},
+        {1.0, -820.111231678807, -320.1107983145504},
+        {0.0, 911.7878651586336, 825.2998851049153},
+        {1.0, -750.2941326911656, -629.1546336560141},
+        {1.0, -890.6374102685097, -804.5407239545832},
+        {1.0, -204.75148861468108, 722.1116624961337},
+        {0.0, 519.1714356909579, 154.07772492651725},
+        {0.0, 982.2450336212896, 897.8824490832485},
+        {0.0, 554.4793545664838, 335.7541373769475},
+        {1.0, -339.90247025178235, 47.02715071976445},
+        {0.0, 901.2543768759774, -662.3275399668249},
+        {1.0, -942.3762411246095, -875.0025895092708},
+        {0.0, 418.20256050104604, -414.3102074305251},
+        {0.0, 625.0294460702908, -625.6315794655841},
+        {1.0, -449.74570685873516, 937.185777575773},
+        {0.0, 508.2386960118979, 454.0962431757914},
+        {1.0, 331.4089009636193, 589.2741722009719},
+        {1.0, 99.06469391982864, 187.0394494146019},
+        {1.0, -982.3370248476699, 322.0973186273661},
+        {1.0, 548.6443983489316, 708.7265431968447},
+        {0.0, 918.9454013804204, -383.8602043941679},
+        {1.0, 47.025960736300476, 171.219298464468},
+        {0.0, 378.2597384891858, 163.1492885941102},
+        {0.0, 438.65288112462554, -139.6734662005057},
+        {1.0, -831.8875659762939, 892.6667556591465},
+        {0.0, 883.0433572247841, -405.08376291753257},
+        {0.0, 885.9349479866808, -577.4873262774219},
+        {1.0, -614.7099535083557, -133.06983968843338},
+        {0.0, 111.7257364798395, -585.9016094589116},
+        {1.0, 453.9214560104581, 999.4093349063546},
+        {1.0, -660.6080448479984, -558.4295455433598},
+        {0.0, -466.8209751830958, -591.196870091049},
+        {0.0, -964.7665601618734, -997.9800903796079},
+        {0.0, -236.07763234295055, -450.41129146522917},
+        {0.0, -621.6876241277605, -797.4500041783042},
+        {0.0, -773.3591978507126, -890.0043590247606},
+        {1.0, -41.04699663875965, 822.3779367276668},
+        {0.0, -88.10853803965915, -192.37350885363378},
+        {0.0, 663.981740050287, -508.81572667480236},
+        {0.0, 15.59472374839936, -806.7541810675616},
+        {1.0, -892.7104844234832, -708.5235867565298},
+        {1.0, -484.65491520217245, 386.6430150137869},
+        {0.0, 865.0610549279427, 615.8811284084713},
+        {1.0, -824.4355093837889, 655.3234320109748},
+        {1.0, -274.68139814419976, -239.53727115479273},
+        {0.0, -86.4277464637313, -881.0777192437689},
+        {1.0, -581.4932661460668, 769.3538369247574},
+        {0.0, -432.5850223289913, -577.4260081674186},
+        {1.0, 166.76522990130684, 582.4331818363789},
+        {0.0, 396.8182460459341, 248.34183939490367},
+        {1.0, -509.8701926143476, 368.8796357552451},
+        {1.0, -482.54152901054886, -248.83959837521047},
+        {1.0, -300.50297994358345, 742.4139758199028},
+        {0.0, 163.28493788474384, -61.41706872692157},
+        {0.0, -399.2277405988791, -930.6519043114885},
+        {0.0, 44.13900477801826, -571.5314250642764},
+        {0.0, 457.8794897532496, -505.99693186447195},
+        {0.0, 16.85880382123935, -451.1811783607169},
+        {1.0, -743.4540696447744, 325.39937301862096},
+        {1.0, 57.40459247973081, 106.58399169789641},
+        {1.0, 183.98880310846016, 499.74779967287395},
+        {1.0, 567.0903172389608, 820.4986606446041},
+        {0.0, 672.4806526088855, 300.601012280614},
+        {0.0, -343.8894522407976, -761.4942297431235},
+        {0.0, 870.247864223385, -168.14608036197296},
+        {1.0, 593.005455426467, 673.1630290763387},
+        {0.0, -625.9494316959813, -983.6968015830237},
+        {1.0, 494.1754094118269, 992.2691899024903},
+        {0.0, 61.401789304312615, -773.2837841463802},
+        {1.0, -194.76742246565573, 69.77988116139159},
+        {0.0, 206.82364861578685, 121.15474801344544},
+        {1.0, -265.964495521001, 50.790074285276205},
+        {0.0, 818.3873132702915, 36.49793444927877},
+        {0.0, 99.81409878465752, -628.0274914181116},
+        {0.0, 464.149315901346, -321.29715928735277},
+        {1.0, -164.52462729937565, 952.4896905712137},
+        {0.0, -63.17364851415209, -149.49056773721736},
+        {0.0, 882.9288293898815, 171.00117804059573},
+        {0.0, 473.3733180102365, -689.3426862684687},
+        {0.0, 165.7220875180078, -354.71003889056044},
+        {0.0, 525.5517697849327, 415.84107073078167},
+        {0.0, -38.184721358457864, -99.36030799911896},
+        {0.0, 242.96729902384163, -156.16029387422054},
+        {0.0, 448.4711090805122, -495.01683482080705},
+        {1.0, -80.15226220702493, 970.6850105496733},
+        {0.0, 870.3328249998483, 583.0363909361256},
+        {0.0, -238.61798549246464, -430.95739845768026},
+        {0.0, -153.01230031899092, -482.12077718764306},
+        {1.0, -118.06183953458049, 40.44154430898425},
+        {1.0, -876.8968143885145, -370.6419068924105},
+        {0.0, 989.8165746071368, -943.0636134966381},
+        {0.0, 448.68476431428917, 44.44832374987436},
+        {0.0, -5.562631397638029, -594.7883897866259},
+        {0.0, 880.7175397337289, 786.6444839355895},
+        {0.0, 476.3278235630439, -756.8025350513306},
+        {0.0, -209.1261948306602, -366.9709734757247},
+        {1.0, -1.5342655753494228, 295.69953419777266},
+        {1.0, 98.88194946977887, 709.984198980128},
+        {1.0, -102.4522435336255, 348.55854643990347},
+        {1.0, 431.6422144084929, 488.26608578711966},
+        {1.0, -629.5648689407153, -389.98821373225144},
+        {1.0, -655.6263155228037, 89.12505314113082},
+        {0.0, -201.6475575882739, -902.9470477574147},
+        {1.0, -342.30143560116915, 157.21169053018912},
+        {1.0, -671.4797028289656, -49.48397951858112},
+        {1.0, -993.3541982679827, 428.50119148048657},
+        {0.0, 158.95824836793054, 115.93705315336206},
+        {1.0, -858.292999815246, 946.8912002937116},
+        {1.0, -223.10861890967476, 190.7507270694814},
+        {0.0, -147.9091707330915, -899.2785339400244},
+        {0.0, 254.55648822491457, -260.9331332388332},
+        {0.0, 560.3172529225217, 388.76836664538814},
+        {0.0, 924.1007767093995, 56.69156104001263},
+        {0.0, 62.42705110549082, -888.0360838024912},
+        {0.0, 222.43761905783595, 88.18795871018938},
+        {0.0, 489.8756173625022, 421.3474970424486},
+        {0.0, 246.6646015601891, -506.3175818566548},
+        {0.0, -620.5001534479718, -774.7836865370457},
+        {1.0, -654.0153133260937, -369.1547696738236},
+        {0.0, 853.1429595371762, -87.56985188355861},
+        {1.0, -226.84561483455388, 122.80144293902458},
+        {1.0, 335.09779003775316, 731.0032200516428},
+        {1.0, 87.90214612318391, 724.8989520503376},
+        {0.0, -51.792728592205776, -298.0103777307395},
+        {1.0, -421.181682827191, 41.01565470282776},
+        {1.0, -626.6392286104665, 227.98017875883284},
+        {1.0, -839.0341042344045, 990.7893877153003},
+        {1.0, -9.321936022159207, 125.24249479969853},
+        {0.0, 665.2916192497585, 314.9312297793483},
+        {1.0, -236.71130814979108, 41.56269468081973},
+        {1.0, -695.4935496704909, -364.376100277162},
+        {0.0, 60.90303121087936, -525.9732822401365},
+        {1.0, -740.9211189318623, 328.1577766746841},
+        {0.0, 636.7728693761635, 231.63887313030887},
+        {0.0, 783.8640093145868, -86.94016828207737},
+        {1.0, -122.79445443476675, 446.8427679254348},
+        {0.0, -599.127065456006, -641.9946421169902},
+        {0.0, -133.3932116798295, -715.8087793479069},
+        {0.0, 868.1768857382554, -356.8832640029416},
+        {1.0, -729.5079555062296, 48.18869346933934},
+        {1.0, -323.311327276945, 51.37289795053448},
+        {1.0, -863.9094602749768, -526.3307161874084},
+        {0.0, -172.237643059304, -545.395840196842},
+        {1.0, 379.0803154405653, 860.9286051762328},
+        {0.0, 646.3490077056538, 221.13771257535495},
+        {1.0, -493.2329575593668, 938.8602740452263},
+        {0.0, 852.1508064390962, 186.42129731281898},
+        {0.0, -105.17633183875978, -819.8477185986328},
+        {0.0, 794.7790444633961, 225.19911969860573},
+        {0.0, 306.4485552684148, 290.3991023596727},
+        {1.0, -348.52545404552563, -302.8538669615166},
+        {1.0, -621.5896829696857, -586.764214213187},
+        {0.0, -360.9052184666539, -501.2314262330038},
+        {0.0, 512.0475423578778, -968.4211685736286},
+        {0.0, -1.0553261239787162, -649.1131987920394},
+        {1.0, -353.0059560079317, -343.82940709059096},
+        {0.0, 281.71038662642286, -536.6960537047482},
+        {1.0, -919.2355704939898, 782.9875939766282},
+        {1.0, -554.7648476025646, 670.76664941987},
+        {0.0, 287.54041983444336, 106.2628262971964},
+        {1.0, -71.36414070058743, 481.00905876949264},
+        {1.0, -525.4581932812421, 507.16990298296923},
+        {0.0, 510.1084615227803, -813.3443471544821},
+        {0.0, -515.8000398448883, -551.1523846072581},
+        {1.0, -941.5905835281701, 178.53493537516124},
+        {1.0, -826.4320007540575, -391.32308974320074},
+        {1.0, -362.25207668798646, 711.1776477575349},
+        {1.0, -363.13146140965796, 58.76850122459791},
+        {1.0, -637.0939514034111, -57.18171960880602},
+        {1.0, 811.8537434287423, 893.8406118576338},
+        {1.0, -351.36128471993413, -164.8367432830371},
+        {0.0, -625.8073644486308, -938.5091097468568},
+        {0.0, 131.36904305993585, 59.945922200265386},
+        {1.0, 300.49666138667953, 544.089396622054},
+        {1.0, 150.9533638033147, 943.667562848439},
+        {1.0, -232.3556550990304, 976.0470122102599},
+        {1.0, 135.8097187722467, 262.21166985817695},
+        {0.0, -97.51353115825805, -890.6273287611524},
+        {1.0, -711.4020131465077, -20.13806627790268},
+        {0.0, 917.1543030685937, -872.6562190191934},
+        {1.0, -657.7632592299774, -596.4956657628013},
+        {0.0, 806.7273372492091, 154.3973882475018},
+        {0.0, 371.7932221354017, -847.5721372522485},
+        {0.0, 887.0251089691258, -306.6059397900773},
+        {1.0, -171.52557116367404, 819.6507572581761},
+        {0.0, 632.2374116222845, -635.8014704304069},
+        {0.0, -213.33363068356653, -639.038384943213},
+        {0.0, 737.7847710201636, -843.291366957395},
+        {0.0, -430.7114667797465, -665.7014140302028},
+        {0.0, 18.317432837854085, -309.1307864153605},
+        {0.0, 689.3196508440624, 398.22692583132357},
+        {0.0, 908.6965655126414, -321.7431267700932},
+        {0.0, 604.2361606207025, -174.1208906780612},
+        {1.0, -816.014328616853, -468.5728222442267},
+        {1.0, -124.50677921712554, 439.4225345583168},
+        {0.0, -736.4729915358428, -745.435394454091},
+        {1.0, -201.1314081356761, 132.070557003796},
+        {1.0, -538.2469045343253, 719.2630473774586},
+        {1.0, -579.3039091203984, 961.7644587928542},
+        {1.0, -131.07569768983058, -14.067659190625022},
+        {1.0, -961.9324831150435, 815.7775199747161},
+        {0.0, 959.0805916122792, 210.22031178108682},
+        {0.0, 537.3004634155134, -821.1232504829824},
+        {1.0, -525.577776451393, 523.8546325250404},
+        {1.0, -490.37425007561785, 613.9247103792861},
+        {1.0, 725.2941641152454, 924.7691776631311},
+        {0.0, 850.5191959199387, -911.7156754307339},
+        {1.0, -535.3827552133765, -256.1333041657481},
+        {1.0, 93.24441210512305, 980.899958839474},
+        {1.0, 125.58210878499744, 489.9200659506546},
+        {1.0, -265.0907509361897, -181.36232727265053},
+        {1.0, -805.0528978104943, -774.3428711441273},
+        {0.0, 299.481029365769, 274.2467784888322},
+        {1.0, -872.6432839751412, -724.9692038478101},
+        {0.0, -327.77109720806027, -346.06090524099113},
+        {0.0, -769.9407295518204, -947.4499512111647},
+        {0.0, 708.176001237056, -701.9900242821255},
+        {0.0, 429.7900423607498, -767.8607100772805},
+        {0.0, 514.9666605063433, -252.09527799878242},
+        {1.0, -392.6943024744394, 943.3642876383242},
+        {0.0, -171.97676164837765, -964.9749845719992},
+        {0.0, 25.3949751703301, -761.3459408840288},
+        {0.0, 327.0516125752938, -81.26274312696592},
+        {0.0, -926.4851014957853, -970.9563176084357},
+        {1.0, -985.2416286372801, -758.6127879964147},
+        {0.0, 338.7854869375187, -231.37122411100802},
+        {1.0, -995.9157184785086, -310.8674450540059},
+        {0.0, 485.52790893379097, 7.909018196822899},
+        {1.0, -289.76601009744377, -93.43411467378803},
+        {1.0, -352.91681813664957, 970.6609344632727},
+        {1.0, -634.2596635738871, 478.54324561131875},
+        {1.0, -496.623286353002, 526.7778661797483},
+        {0.0, 837.0404771301767, 671.1823960639354},
+        {0.0, -284.5931069950618, -893.2503900000672},
+        {0.0, 739.6925158457948, -572.886151546864},
+        {1.0, 505.37418939555437, 914.4939776238757},
+        {0.0, 65.79978723030536, -59.26282586191303},
+        {0.0, 775.1318885055389, -698.3367782064498},
+        {1.0, -871.3166585822554, -351.74555670546727}
+    };
+
+    /** Four point sets grouped around the vertices of a square. */
+    protected static final double[][] fourSetsInSquareVertices = {
+        {0, 9.35096604945605, 9.946073797069054},
+        {0, 9.135109633114403, 9.962676066205383},
+        {0, 9.046654725589521, 9.610699793950662},
+        {0, 9.827221553421282, 9.4176319880153},
+        {0, 9.277441430833566, 9.502990699976},
+        {0, 9.444827307967367, 9.903310367805602},
+        {0, 9.911404997680545, 9.226246217883297},
+        {0, 9.950231642973769, 9.453518533258803},
+        {0, 9.281545278543017, 9.438272102773379},
+        {0, 9.032306746555102, 9.517675092676706},
+        {0, 9.286542956290456, 9.15288903978334},
+        {0, 9.896451632473255, 9.019751070009821},
+        {0, 9.611642481367562, 9.17209652044495},
+        {0, 9.592540623266126, 9.306160678545629},
+        {0, 9.817470117880873, 9.838651444371973},
+        {0, 9.263220850397941, 9.139179322873582},
+        {0, 9.949097640181272, 9.624710378790242},
+        {0, 9.616004097319287, 9.421557303733453},
+        {0, 9.512900976289933, 9.28642137092367},
+        {0, 9.207793663546337, 9.40094289636865},
+        {0, 9.079279410265883, 9.76978559451163},
+        {0, 9.328945661288095, 9.645773710532888},
+        {0, 9.80101696222916, 9.511903913501255},
+        {0, 9.882593127029741, 9.73545127073394},
+        {0, 9.75372887212885, 9.435141350132769},
+        {0, 9.288527674365598, 9.055665753045206},
+        {0, 9.88272159816372, 9.055932205550423},
+        {0, 9.385642321423624, 9.922172934733265},
+        {0, 9.830217517055729, 9.415174260405154},
+        {0, 9.184970761195489, 9.03515483431538},
+        {0, 9.747503155479809, 9.38708759338332},
+        {0, 9.953962908254736, 9.483949174467012},
+        {0, 9.271685731881993, 9.128890010491494},
+        {0, 9.441240324686845, 9.07960435205457},
+        {0, 9.168560731741703, 9.256530860101089},
+        {0, 9.010517147230432, 9.94335328515589},
+        {0, 9.1749227239244, 9.018681913631386},
+        {0, 9.413360501729251, 9.302212703700196},
+        {0, 9.439461439481182, 9.318631395882242},
+        {0, 9.531551691985907, 9.232525664308465},
+        {0, 9.466805772615563, 9.511711890834333},
+        {0, 9.633242901042053, 9.972778102570045},
+        {0, 9.517692290376388, 9.73537462150143},
+        {0, 9.187046049036134, 9.059073377533783},
+        {0, 9.121523234392956, 9.504221886903101},
+        {0, 9.493957951674021, 9.608201135992367},
+        {0, 9.981993764415321, 9.333278989889811},
+        {0, 9.371277571698762, 9.110041365023866},
+        {0, 9.681446270907697, 9.7870063720198},
+        {0, 9.639466883264246, 9.434768030033164},
+        {0, 9.391982858267035, 9.934707093985823},
+        {0, 9.550060071547726, 9.473132681990514},
+        {0, 9.256562054384402, 9.211913854106896},
+        {0, 9.46408385327689, 9.158869250798142},
+        {0, 9.442994981367162, 9.189227375629654},
+        {0, 9.697833866121318, 9.21112449845501},
+        {0, 9.115534908153043, 9.115227178046245},
+        {0, 9.835218474137239, 9.98174155822633},
+        {0, 9.026698146309743, 9.248759846540965},
+        {0, 9.68118581769866, 9.40512628823504},
+        {0, 9.81721640069966, 9.369105145483651},
+        {0, 9.975877208452287, 9.640693828024975},
+        {0, 9.823272242807437, 9.46823993908653},
+        {0, 9.638281188176519, 9.534774307683374},
+        {0, 9.597003178481613, 9.84238115941204},
+        {0, 9.941999007792681, 9.331877359355289},
+        {0, 9.050540877852525, 9.244472301490417},
+        {0, 9.358931306187054, 9.900809398285286},
+        {0, 9.170247599517836, 9.87585551194908},
+        {0, 9.461705027907554, 9.167319400226486},
+        {0, 9.076729207165052, 9.677578134220534},
+        {0, 9.488544686081216, 9.62380634923249},
+        {0, 9.929150661994122, 9.152491122614597},
+        {0, 9.890051482992417, 9.1709621079536},
+        {0, 9.839485513056095, 9.643849781319778},
+        {0, 9.749461922180853, 9.045432748127462},
+        {0, 9.58439542919333, 9.225044809549836},
+        {0, 9.479465134364697, 9.706551666966702},
+        {0, 9.00707492076871, 9.839317970534172},
+        {0, 9.948409701102793, 9.380261430658763},
+        {0, 9.264850115578076, 9.696516344063658},
+        {0, 9.977078194073387, 9.213405339955512},
+        {0, 9.648087669569941, 9.898977891084664},
+        {0, 9.724090075117749, 9.876133066062916},
+        {0, 9.445249316659568, 9.373023119966643},
+        {0, 9.995541563884071, 9.57923804140667},
+        {0, 9.667359233860397, 9.720098746660245},
+        {0, 9.379303845088474, 9.520602789251743},
+        {0, 9.996287800651865, 9.838061655335768},
+        {0, 9.318835567328465, 9.009915558605616},
+        {0, 9.103894679089793, 9.674971708485224},
+        {0, 9.346826400314828, 9.888779618232787},
+        {0, 9.659116962016478, 9.608712473271416},
+        {0, 9.661516337354719, 9.416786365864226},
+        {0, 9.642593770590324, 9.251344999039574},
+        {0, 9.134003475979116, 9.551760245909657},
+        {0, 9.524862003327057, 9.307789887454172},
+        {0, 9.883705581666579, 9.325086464359684},
+        {0, 9.96076863440133, 9.81636527085299},
+        {0, 9.995704158311584, 9.544553004819253},
+        {1, -9.094953387232211, 9.06233128328723},
+        {1, -9.304897363378368, 9.143926554861004},
+        {1, -9.03524958020074, 9.370326522034881},
+        {1, -9.120893310395626, 9.271851530835537},
+        {1, -9.510902040922451, 9.2470398948938},
+        {1, -9.6525973741057, 9.725355730393005},
+        {1, -9.65730261326345, 9.757814601272596},
+        {1, -9.597463454487615, 9.870093256106818},
+        {1, -9.190101362739775, 9.594505054154807},
+        {1, -9.72020516663928, 9.49084494643775},
+        {1, -9.723347588431338, 9.129139508430457},
+        {1, -9.33996314024198, 9.525934956132764},
+        {1, -9.824803485424123, 9.128546700002982},
+        {1, -9.346973220919576, 9.934992542662958},
+        {1, -9.685940369418338, 9.30810392592615},
+        {1, -9.064058121381708, 9.846942888423445},
+        {1, -9.368987058951426, 9.557135466015499},
+        {1, -9.782353308524383, 9.857550405413855},
+        {1, -9.281500887267686, 9.056968941046172},
+        {1, -9.514451522447168, 9.713696846961527},
+        {1, -9.607099689382135, 9.682075033940093},
+        {1, -9.144871412854759, 9.146320338346246},
+        {1, -9.54203309158306, 9.220014377847022},
+        {1, -9.238269645840251, 9.948063795512258},
+        {1, -9.286942806777112, 9.522342489392214},
+        {1, -9.591474157985536, 9.240285207594253},
+        {1, -9.652843973116592, 9.557983695755953},
+        {1, -9.126794849562028, 9.452966323026885},
+        {1, -9.877221229728452, 9.151312939643672},
+        {1, -9.170379066479606, 9.381576400806694},
+        {1, -9.411298671068392, 9.133322302544746},
+        {1, -9.666443924685849, 9.66428867311317},
+        {1, -9.347964494643556, 9.012849397302583},
+        {1, -9.493681117964078, 9.332240464982554},
+        {1, -9.623975723800413, 9.419921503264844},
+        {1, -9.292219487063763, 9.00214102314859},
+        {1, -9.194419464738496, 9.640048387436925},
+        {1, -9.886720923292938, 9.834939723803704},
+        {1, -9.90520284610924, 9.17595267606471},
+        {1, -9.284829868633738, 9.268795876426012},
+        {1, -9.498878372098952, 9.5997098342015},
+        {1, -9.359302922869169, 9.47880701571168},
+        {1, -9.258562740082393, 9.497531680793207},
+        {1, -9.895388929537848, 9.00756585816333},
+        {1, -9.627928477333924, 9.391262771761872},
+        {1, -9.525281129279826, 9.796892255719904},
+        {1, -9.59598592778135, 9.067874949457092},
+        {1, -9.110283105135892, 9.821653780489235},
+        {1, -9.343973780672988, 9.63557812382392},
+        {1, -9.87812414314095, 9.978007969979139},
+        {1, -9.98832246915748, 9.623150872300222},
+        {1, -9.115997082508613, 9.965470531748467},
+        {1, -9.874391718339105, 9.214113577543877},
+        {1, -9.671664494678888, 9.15862012290195},
+        {1, -9.031596433460688, 9.616814958480965},
+        {1, -9.758627761132653, 9.511908952613643},
+        {1, -9.205087108977219, 9.840949306240816},
+        {1, -9.171734592697309, 9.702842939318314},
+        {1, -9.082886085070493, 9.524201651321903},
+        {1, -9.74595864484071, 9.219346103723025},
+        {1, -9.898468941378516, 9.994402484197503},
+        {1, -9.341582531784448, 9.193680038418634},
+        {1, -9.570090524257228, 9.201198104295603},
+        {1, -9.88361320124743, 9.027615263347323},
+        {1, -9.154222720481965, 9.799927021695417},
+        {1, -9.364221227791875, 9.042090834574182},
+        {1, -9.333131749015948, 9.790442620484125},
+        {1, -9.286700941581561, 9.89073867458494},
+        {1, -9.348737197099151, 9.637939060929087},
+        {1, -9.442420524656606, 9.07802294456236},
+        {1, -9.069329135123306, 9.658515489139848},
+        {1, -9.306682910312364, 9.20831776028291},
+        {1, -9.033846541544232, 9.32904963306478},
+        {1, -9.706767953982897, 9.9204656840812},
+        {1, -9.855922299233484, 9.212398390928783},
+        {1, -9.31778377138365, 9.001381041592891},
+        {1, -9.498262395904716, 9.627240779587641},
+        {1, -9.165515191167106, 9.8269942856602},
+        {1, -9.975445549855277, 9.940934989111799},
+        {1, -9.083105286998059, 9.006127740460453},
+        {1, -9.570145038082837, 9.682155599203648},
+        {1, -9.61392195996382, 9.417864984298848},
+        {1, -9.274771331302999, 9.641773516631659},
+        {1, -9.296296304670749, 9.782496135034126},
+        {1, -9.906415110246952, 9.754391405446135},
+        {1, -9.401887484923442, 9.177845637020802},
+        {1, -9.530971211940608, 9.165119804525942},
+        {1, -9.82379861350907, 9.79567065636976},
+        {1, -9.652776399686564, 9.905939382705197},
+        {1, -9.876593047451918, 9.945310791455892},
+        {1, -9.663611565135188, 9.362793091580434},
+        {1, -9.199103361444621, 9.635196006461447},
+        {1, -9.190013322848332, 9.124127000468004},
+        {1, -9.29736354578434, 9.717999298890678},
+        {1, -9.220547853711237, 9.559927412569595},
+        {1, -9.300431356958706, 9.76396216541998},
+        {1, -9.157649670754807, 9.990846988919046},
+        {1, -9.681918677002109, 9.68618286595764},
+        {1, -9.309195235661146, 9.312880801021818},
+        {1, -9.061160475710913, 9.076614202325946},
+        {2, -9.062489260904384, -9.29639290758419},
+        {2, -9.228543182338143, -9.678377216077045},
+        {2, -9.058090832908235, -9.193945883550121},
+        {2, -9.133051729493113, -9.591373007767894},
+        {2, -9.287844094445548, -9.551255004015},
+        {2, -9.007505358739156, -9.364102496975889},
+        {2, -9.573448348548297, -9.721351111009751},
+        {2, -9.839063104064442, -9.913376420693114},
+        {2, -9.009615911555375, -9.726047024128608},
+        {2, -9.101017317976435, -9.704243867142955},
+        {2, -9.982108914119253, -9.16651010251761},
+        {2, -9.446194150458751, -9.254956921695555},
+        {2, -9.189473272816354, -9.810681137049205},
+        {2, -9.118077427599777, -9.540094810610913},
+        {2, -9.771250464767986, -9.523914718655663},
+        {2, -9.66962428717098, -9.363171620624835},
+        {2, -9.312167530669402, -9.343252976723711},
+        {2, -9.464223946364095, -9.030677424225916},
+        {2, -9.360044171938823, -9.307110078788382},
+        {2, -9.011501658023455, -9.36530250968901},
+        {2, -9.775885771959839, -9.99889314514122},
+        {2, -9.674611861667914, -9.258187855592231},
+        {2, -9.738640777018995, -9.111785670315005},
+        {2, -9.246690988432968, -9.721028941948624},
+        {2, -9.390261807995243, -9.588861735182837},
+        {2, -9.291113352727827, -9.269267155328981},
+        {2, -9.851335630543913, -9.706611637556188},
+        {2, -9.585157995064394, -9.405552049981731},
+        {2, -9.918436572526948, -9.16760421314763},
+        {2, -9.845493743518675, -9.355482956823167},
+        {2, -9.731220848845956, -9.225343258111073},
+        {2, -9.222705334863235, -9.494812693860784},
+        {2, -9.981016698450784, -9.905493543993186},
+        {2, -9.46735837748333, -9.4826245649012},
+        {2, -9.244414641225871, -9.747631208358092},
+        {2, -9.055383358563462, -9.531078141057671},
+        {2, -9.769432919539609, -9.61352749756392},
+        {2, -9.369900693663043, -9.108143337018905},
+        {2, -9.607028386780009, -9.114073621581822},
+        {2, -9.777391839524553, -9.011542490337462},
+        {2, -9.006992341646022, -9.807142738339437},
+        {2, -9.268800709859363, -9.64049865255139},
+        {2, -9.675247117678266, -9.59986912340877},
+        {2, -9.64637138569114, -9.373492063216789},
+        {2, -9.107966171477159, -9.89296888054194},
+        {2, -9.844813041035149, -9.265286980535892},
+        {2, -9.741557572466677, -9.332262102684087},
+        {2, -9.877113842998332, -9.236779890169021},
+        {2, -9.717067250147496, -9.064661056318842},
+        {2, -9.621588376526242, -9.877688077281952},
+        {2, -9.517814042484112, -9.540587835450802},
+        {2, -9.301056957528804, -9.825047942369075},
+        {2, -9.571496535251406, -9.7886645523611},
+        {2, -9.720509286872675, -9.391715190333258},
+        {2, -9.440573147395899, -9.788983529514448},
+        {2, -9.26187156355727, -9.6495064067468},
+        {2, -9.658496105019307, -9.56612823492413},
+        {2, -9.380443710902496, -9.68085867523561},
+        {2, -9.942337341048844, -9.051311192273833},
+        {2, -9.078217384202866, -9.916249447505033},
+        {2, -9.040049870218203, -9.034931091928817},
+        {2, -9.38309299369458, -9.652061835126116},
+        {2, -9.2185666133056, -9.230952753648268},
+        {2, -9.556045604713985, -9.68622417688499},
+        {2, -9.763408055045888, -9.879577482698977},
+        {2, -9.4247203087675, -9.639176769093654},
+        {2, -9.87358328609414, -9.895570735983382},
+        {2, -9.819441742886346, -9.8365613475581},
+        {2, -9.658089225310723, -9.489731885421016},
+        {2, -9.943990436893316, -9.452660725226394},
+        {2, -9.499576083220616, -9.936796975306573},
+        {2, -9.209278737078256, -9.515912871664437},
+        {2, -9.822627739746856, -9.208467277950026},
+        {2, -9.250697491903084, -9.388580147580788},
+        {2, -9.499425743259364, -9.350980719673753},
+        {2, -9.275926339651928, -9.617104986484284},
+        {2, -9.1796228747286, -9.600489090237376},
+        {2, -9.349551823375743, -9.006466892950566},
+        {2, -9.894633921415739, -9.68766261225829},
+        {2, -9.65858550958029, -9.981852526887},
+        {2, -9.260496691277194, -9.809097777192473},
+        {2, -9.819512412109138, -9.897278497873733},
+        {2, -9.095722203640902, -9.373361177409254},
+        {2, -9.350211015838992, -9.8070103689666},
+        {2, -9.666932714082296, -9.012476306081684},
+        {2, -9.808494394881976, -9.141856503904373},
+        {2, -9.659369482494562, -9.813220865668578},
+        {2, -9.695328684452264, -9.348824074007899},
+        {2, -9.559852026507784, -9.874175917293163},
+        {2, -9.09372192117967, -9.409697201374975},
+        {2, -9.287303427948462, -9.501710345463191},
+        {2, -9.559530752361578, -9.915461534620048},
+        {2, -9.582664553428488, -9.403076102016477},
+        {2, -9.52173572568699, -9.741375773070464},
+        {2, -9.65354706029232, -9.818082622224445},
+        {2, -9.415838021477068, -9.532580879297706},
+        {2, -9.574004758496413, -9.086286237660188},
+        {2, -9.270611925252807, -9.702167164438746},
+        {2, -9.95686463396123, -9.162427711243494},
+        {2, -9.274599236711888, -9.877754856511778},
+        {3, 9.14689232210878, -9.775341371070157},
+        {3, 9.023355885230728, -9.650091265039629},
+        {3, 9.445914402990603, -9.367844134021585},
+        {3, 9.12739637867819, -9.790557561179597},
+        {3, 9.041303217790349, -9.586261899702581},
+        {3, 9.205210383417626, -9.985844424729768},
+        {3, 9.760747183322884, -9.739749414833623},
+        {3, 9.43601987318095, -9.299718258558077},
+        {3, 9.14356842517825, -9.295462642558103},
+        {3, 9.833809705258039, -9.468846417300268},
+        {3, 9.718477547371677, -9.823866211360837},
+        {3, 9.922658697442182, -9.769889056711964},
+        {3, 9.33663363137869, -9.088267105115708},
+        {3, 9.137230799593524, -9.764401780467223},
+        {3, 9.858088980083506, -9.304992329702712},
+        {3, 9.382828855133841, -9.493306421449871},
+        {3, 9.798884510277261, -9.380868512833228},
+        {3, 9.62129491417874, -9.539240839001467},
+        {3, 9.779444510688629, -9.192918853610157},
+        {3, 9.200804425227417, -9.214343851278091},
+        {3, 9.775531213188497, -9.073023597174036},
+        {3, 9.390609731389022, -9.977531450420052},
+        {3, 9.852766749781729, -9.994823748876888},
+        {3, 9.206238360247045, -9.67091791978384},
+        {3, 9.188602950870685, -9.510463637602879},
+        {3, 9.331589555754434, -9.922823935052168},
+        {3, 9.476697182752012, -9.313064140116326},
+        {3, 9.356805613304504, -9.835977587036306},
+        {3, 9.339818380404573, -9.255810669018475},
+        {3, 9.17366847248557, -9.947584334056048},
+        {3, 9.53360390823212, -9.795041609984915},
+        {3, 9.609560038477422, -9.285015745600694},
+        {3, 9.577553857280723, -9.96914900300197},
+        {3, 9.464374595524664, -9.618239089480822},
+        {3, 9.398719356212853, -9.7406758194444},
+        {3, 9.154688949078198, -9.248998548314239},
+        {3, 9.679073636776373, -9.965328464852867},
+        {3, 9.47893626848198, -9.9671543632786},
+        {3, 9.068547258387513, -9.297377035663166},
+        {3, 9.076923603177063, -9.914463831030272},
+        {3, 9.976578331543791, -9.360722370503666},
+        {3, 9.089452654960278, -9.675962954595512},
+        {3, 9.070526769096297, -9.878206691195288},
+        {3, 9.930847945955737, -9.07583308430197},
+        {3, 9.241217613699337, -9.631175172125698},
+        {3, 9.124100921554351, -9.228953372107389},
+        {3, 9.508344880276217, -9.860603437908713},
+        {3, 9.11156100183317, -9.325392997885503},
+        {3, 9.817235693989044, -9.39425968469714},
+        {3, 9.001600449220064, -9.425174755596974},
+        {3, 9.548114105927628, -9.808330723888258},
+        {3, 9.26226050324015, -9.767116578977086},
+        {3, 9.614597629315545, -9.041844364395292},
+        {3, 9.538354218499835, -9.098393947752555},
+        {3, 9.103392813936214, -9.09952673162608},
+        {3, 9.420097750306217, -9.098700662928707},
+        {3, 9.751668557712422, -9.38794903932924},
+        {3, 9.931275926738792, -9.567768498966414},
+        {3, 9.046080675655736, -9.638494792341994},
+        {3, 9.770315794108765, -9.43037261292599},
+        {3, 9.752980345824852, -9.748629501818872},
+        {3, 9.451157497026747, -9.122294173303064},
+        {3, 9.8842318143802, -9.26500677925286},
+        {3, 9.757618739984443, -9.43137249310142},
+        {3, 9.312628300108653, -9.35390228978602},
+        {3, 9.290443903557156, -9.235565486135597},
+        {3, 9.006123561818931, -9.152783217337547},
+        {3, 9.570084759165916, -9.927282503148907},
+        {3, 9.421900208122063, -9.081045753111953},
+        {3, 9.653736596553786, -9.901709124803725},
+        {3, 9.18417654510616, -9.251983632346962},
+        {3, 9.528620521688604, -9.153806541933662},
+        {3, 9.804333603959915, -9.140503586471738},
+        {3, 9.450969957775413, -9.158071229394206},
+        {3, 9.20163405176059, -9.485982651544383},
+        {3, 9.54780101021909, -9.037382999154193},
+        {3, 9.075028540176401, -9.398829949196564},
+        {3, 9.874199751417624, -9.811808331246274},
+        {3, 9.954958362231842, -9.233648957978966},
+        {3, 9.03258466527879, -9.432021155003355},
+        {3, 9.611337142970585, -9.18050106929956},
+        {3, 9.748186934551581, -9.283395815931486},
+        {3, 9.203357880317562, -9.734448423320636},
+        {3, 9.918736141570848, -9.139927237002235},
+        {3, 9.923607379931829, -9.747509729243488},
+        {3, 9.11966639233584, -9.144799648581206},
+        {3, 9.332910738465808, -9.836313230806809},
+        {3, 9.72782406722705, -9.636979470475252},
+        {3, 9.587733884348484, -9.429388313887008},
+        {3, 9.713001308076503, -9.378155762534723},
+        {3, 9.553879064305177, -9.456119811781296},
+        {3, 9.326716553614768, -9.398273985573583},
+        {3, 9.845721054911849, -9.2609414976378},
+        {3, 9.43826634715752, -9.226109072709436},
+        {3, 9.46721793264904, -9.959943210987339},
+        {3, 9.47560676057465, -9.963482009295927},
+        {3, 9.006435968586619, -9.202759792205478},
+        {3, 9.053062605095485, -9.798289703474865},
+        {3, 9.959296741639132, -9.762961500922069},
+        {3, 9.882357321966778, -9.069477551120192}
+    };
+}
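
For reference, a minimal stand-alone sketch of how the two hand-crafted datasets above could be sanity-checked. It is not part of the patch: the class name FixedDatasetSanityCheck is made up, and only a few representative rows are inlined in place of the full arrays from the diff. The sketch checks the two properties stated in the Javadoc comments: rows of twoLinearlySeparableClasses carry label 1 exactly when the point lies above the line y = x, and rows of fourSetsInSquareVertices carry the label of the square vertex (quadrant) they are grouped around.

import java.util.Arrays;

/** Stand-alone sanity check for the fixed test datasets (rows have the form {label, x, y}). */
public class FixedDatasetSanityCheck {
    public static void main(String[] args) {
        // A few representative rows; the real arrays are the ones added in the diff above.
        double[][] twoLinearlySeparableClasses = {
            {0.0, -122.69914721554494, -152.90003228835155},
            {1.0, -988.7803093110984, 39.64498230320555}
        };
        double[][] fourSetsInSquareVertices = {
            {0, 9.35096604945605, 9.946073797069054},
            {1, -9.094953387232211, 9.06233128328723},
            {2, -9.062489260904384, -9.29639290758419},
            {3, 9.14689232210878, -9.775341371070157}
        };

        // Label 1 iff the point lies above the line y = x, label 0 otherwise.
        boolean linearOk = Arrays.stream(twoLinearlySeparableClasses)
            .allMatch(r -> (r[2] > r[1]) == (r[0] == 1.0));

        // Labels 0..3 correspond to the (+,+), (-,+), (-,-) and (+,-) quadrants respectively.
        boolean quadrantsOk = Arrays.stream(fourSetsInSquareVertices)
            .allMatch(r -> expectedLabel(r[1], r[2]) == (int)r[0]);

        System.out.println("y = x separation holds: " + linearOk);
        System.out.println("square-vertex grouping holds: " + quadrantsOk);
    }

    /** Maps a point to the cluster label used by fourSetsInSquareVertices. */
    private static int expectedLabel(double x, double y) {
        if (x > 0 && y > 0) return 0;
        if (x < 0 && y > 0) return 1;
        if (x < 0 && y < 0) return 2;
        return 3;
    }
}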

http://git-wip-us.apache.org/repos/asf/ignite/blob/bba6adf8/modules/ml/src/test/java/org/apache/ignite/ml/knn/ANNClassificationTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/knn/ANNClassificationTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/knn/ANNClassificationTest.java
index b44b7cd..aed6387 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/knn/ANNClassificationTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/knn/ANNClassificationTest.java
@@ -17,13 +17,11 @@
 
 package org.apache.ignite.ml.knn;
 
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.common.TrainerTest;
 import org.apache.ignite.ml.knn.ann.ANNClassificationModel;
 import org.apache.ignite.ml.knn.ann.ANNClassificationTrainer;
 import org.apache.ignite.ml.knn.classification.NNStrategy;
@@ -31,82 +29,32 @@ import org.apache.ignite.ml.math.distances.EuclideanDistance;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.junit.Assert;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 /** Tests behaviour of ANNClassificationTest. */
-@RunWith(Parameterized.class)
-public class ANNClassificationTest {
-    /** Number of parts to be tested. */
-    private static final int[] partsToBeTested = new int[]{1, 2, 3, 4, 5, 7, 100};
-
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** Precision in test checks. */
-    private static final double PRECISION = 1e-2;
-
-    /** Number of partitions. */
-    @Parameterized.Parameter
-    public int parts;
-
-    /** Parameters. */
-    @Parameterized.Parameters(name = "Data divided on {0} partitions, training with batch size {1}")
-    public static Iterable<Integer[]> data() {
-        List<Integer[]> res = new ArrayList<>();
-
-        for (int part : partsToBeTested)
-            res.add(new Integer[]{part});
-
-        return res;
-    }
-
+public class ANNClassificationTest extends TrainerTest {
     /** */
     @Test
     public void testBinaryClassification() {
-        Map<Integer, double[]> data = new HashMap<>();
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
-
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(500, 600);
-            double y = rndY.nextDouble(500, 600);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = 0; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        Map<Integer, double[]> cacheMock = new HashMap<>();
 
-        for (int i = AMOUNT_OF_OBSERVATIONS; i < AMOUNT_OF_OBSERVATIONS * 2; i++) {
-            double x = rndX.nextDouble(-600, -500);
-            double y = rndY.nextDouble(-600, -500);
-            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
-            vec[0] = 1; // assign label.
-            vec[1] = x;
-            vec[2] = y;
-            data.put(i, vec);
-        }
+        for (int i = 0; i < twoClusters.length; i++)
+            cacheMock.put(i, twoClusters[i]);
 
         ANNClassificationTrainer trainer = new ANNClassificationTrainer()
             .withK(10)
             .withMaxIterations(10)
             .withEpsilon(1e-4)
             .withDistance(new EuclideanDistance())
-            .withSeed(0);
+            .withSeed(1234L);
 
         Assert.assertEquals(10, trainer.getK());
         Assert.assertEquals(10, trainer.getMaxIterations());
         TestUtils.assertEquals(1e-4, trainer.getEpsilon(), PRECISION);
         Assert.assertEquals(new EuclideanDistance(), trainer.getDistance());
-        Assert.assertEquals(0, trainer.getSeed());
+        Assert.assertEquals(1234L, trainer.getSeed());
 
         NNClassificationModel mdl = trainer.fit(
-            data,
+            cacheMock,
             parts,
             (k, v) -> VectorUtils.of(Arrays.copyOfRange(v, 1, v.length)),
             (k, v) -> v[0]
@@ -117,7 +65,7 @@ public class ANNClassificationTest {
         TestUtils.assertEquals(0, mdl.apply(VectorUtils.of(550, 550)), PRECISION);
         TestUtils.assertEquals(1, mdl.apply(VectorUtils.of(-550, -550)), PRECISION);
 
-        Assert.assertNotNull(((ANNClassificationModel)mdl).getCandidates());
+        Assert.assertNotNull(((ANNClassificationModel) mdl).getCandidates());
 
         Assert.assertTrue(mdl.toString().contains(NNStrategy.SIMPLE.name()));
         Assert.assertTrue(mdl.toString(true).contains(NNStrategy.SIMPLE.name()));