Posted to commits@spark.apache.org by me...@apache.org on 2016/02/12 00:53:47 UTC

spark git commit: [MINOR][ML][PYSPARK] Cleanup test cases of clustering.py

Repository: spark
Updated Branches:
  refs/heads/master c8f667d7c -> 2426eb3e1


[MINOR][ML][PYSPARK] Cleanup test cases of clustering.py

Test cases should be removed from the docstrings of the ```setXXX``` methods, otherwise they become part of the [Python API docs](https://spark.apache.org/docs/latest/api/python/pyspark.ml.html#pyspark.ml.clustering.KMeans.setInitMode), as the sketch below illustrates.
cc mengxr jkbradley

Author: Yanbo Liang <yb...@gmail.com>

Closes #10975 from yanboliang/clustering-cleanup.
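
For illustration, here is a minimal sketch (not taken from Spark's sources; the FakeKMeans class and test names are hypothetical) of the pattern being cleaned up: Sphinx renders a method's docstring verbatim in the generated API docs, so a doctest embedded there shows up on the documentation page, whereas the same assertion expressed as a unittest case does not.

    import unittest


    class FakeKMeans(object):
        """Hypothetical estimator, used only to illustrate the docstring cleanup."""

        def __init__(self):
            self._k = 2

        def setK(self, value):
            """
            Sets the value of :py:attr:`k`.

            Before this change, a doctest such as
            ``>>> FakeKMeans().setK(10).getK()`` lived here, so it was rendered
            as part of the generated API documentation for setK.
            """
            self._k = value
            return self

        def getK(self):
            return self._k


    class FakeKMeansParamTests(unittest.TestCase):
        """After the cleanup, the same check lives in a unit test instead."""

        def test_k_param(self):
            self.assertEqual(FakeKMeans().setK(10).getK(), 10)


    if __name__ == "__main__":
        unittest.main()

Running the module with python still exercises the setter, but the rendered docstring stays free of example output.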


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2426eb3e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2426eb3e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2426eb3e

Branch: refs/heads/master
Commit: 2426eb3e167fece19831070594247e9481dbbe2a
Parents: c8f667d
Author: Yanbo Liang <yb...@gmail.com>
Authored: Thu Feb 11 15:53:45 2016 -0800
Committer: Xiangrui Meng <me...@databricks.com>
Committed: Thu Feb 11 15:53:45 2016 -0800

----------------------------------------------------------------------
 python/pyspark/ml/clustering.py | 15 ---------------
 python/pyspark/ml/tests.py      |  9 +++++++++
 2 files changed, 9 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2426eb3e/python/pyspark/ml/clustering.py
----------------------------------------------------------------------
diff --git a/python/pyspark/ml/clustering.py b/python/pyspark/ml/clustering.py
index 60d1c9a..12afb88 100644
--- a/python/pyspark/ml/clustering.py
+++ b/python/pyspark/ml/clustering.py
@@ -113,10 +113,6 @@ class KMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol
     def setK(self, value):
         """
         Sets the value of :py:attr:`k`.
-
-        >>> algo = KMeans().setK(10)
-        >>> algo.getK()
-        10
         """
         self._paramMap[self.k] = value
         return self
@@ -132,13 +128,6 @@ class KMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol
     def setInitMode(self, value):
         """
         Sets the value of :py:attr:`initMode`.
-
-        >>> algo = KMeans()
-        >>> algo.getInitMode()
-        'k-means||'
-        >>> algo = algo.setInitMode("random")
-        >>> algo.getInitMode()
-        'random'
         """
         self._paramMap[self.initMode] = value
         return self
@@ -154,10 +143,6 @@ class KMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol
     def setInitSteps(self, value):
         """
         Sets the value of :py:attr:`initSteps`.
-
-        >>> algo = KMeans().setInitSteps(10)
-        >>> algo.getInitSteps()
-        10
         """
         self._paramMap[self.initSteps] = value
         return self

http://git-wip-us.apache.org/repos/asf/spark/blob/2426eb3e/python/pyspark/ml/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/ml/tests.py b/python/pyspark/ml/tests.py
index 54806ee..e93a4e1 100644
--- a/python/pyspark/ml/tests.py
+++ b/python/pyspark/ml/tests.py
@@ -39,6 +39,7 @@ import tempfile
 
 from pyspark.ml import Estimator, Model, Pipeline, Transformer
 from pyspark.ml.classification import LogisticRegression
+from pyspark.ml.clustering import KMeans
 from pyspark.ml.evaluation import RegressionEvaluator
 from pyspark.ml.feature import *
 from pyspark.ml.param import Param, Params
@@ -243,6 +244,14 @@ class ParamTests(PySparkTestCase):
                        "maxIter: max number of iterations (>= 0). (default: 10, current: 100)",
                        "seed: random seed. (default: 41, current: 43)"]))
 
+    def test_kmeans_param(self):
+        algo = KMeans()
+        self.assertEqual(algo.getInitMode(), "k-means||")
+        algo.setK(10)
+        self.assertEqual(algo.getK(), 10)
+        algo.setInitSteps(10)
+        self.assertEqual(algo.getInitSteps(), 10)
+
     def test_hasseed(self):
         noSeedSpecd = TestParams()
         withSeedSpecd = TestParams(seed=42)
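
For context, a minimal usage sketch (not part of this commit) of the KMeans parameters exercised by the new test_kmeans_param test. The toy data, the ``features`` column name, and the ambient ``sqlContext`` are illustrative assumptions for a pre-2.0 PySpark shell.

    from pyspark.mllib.linalg import Vectors  # pyspark.ml.linalg did not exist yet at this point
    from pyspark.ml.clustering import KMeans

    # Four 2-D points that form two well-separated clusters.
    df = sqlContext.createDataFrame(
        [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
         (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)],
        ["features"])

    algo = KMeans(k=2, seed=1)
    print(algo.getInitMode())   # default initialization mode: 'k-means||'
    algo.setInitSteps(5)        # setters still work; they just no longer carry doctests
    model = algo.fit(df)
    print(model.clusterCenters())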

