You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by da...@apache.org on 2015/10/23 00:27:15 UTC

spark git commit: [SPARK-7021] Add JUnit output for Python unit tests

Repository: spark
Updated Branches:
  refs/heads/master 53e83a3a7 -> 163d53e82


[SPARK-7021] Add JUnit output for Python unit tests

WIP

Author: Gábor Lipták <gl...@gmail.com>

Closes #8323 from gliptak/SPARK-7021.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/163d53e8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/163d53e8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/163d53e8

Branch: refs/heads/master
Commit: 163d53e829c166f061589cc379f61642d4c9a40f
Parents: 53e83a3
Author: Gábor Lipták <gl...@gmail.com>
Authored: Thu Oct 22 15:27:11 2015 -0700
Committer: Davies Liu <da...@gmail.com>
Committed: Thu Oct 22 15:27:11 2015 -0700

----------------------------------------------------------------------
 python/pyspark/ml/tests.py        |  9 ++++++++-
 python/pyspark/mllib/tests.py     |  9 ++++++++-
 python/pyspark/sql/tests.py       |  9 ++++++++-
 python/pyspark/streaming/tests.py | 11 ++++++++++-
 python/pyspark/tests.py           | 19 ++++++++++++++-----
 5 files changed, 48 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/163d53e8/python/pyspark/ml/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/ml/tests.py b/python/pyspark/ml/tests.py
index 6a2577d..7a16cf5 100644
--- a/python/pyspark/ml/tests.py
+++ b/python/pyspark/ml/tests.py
@@ -20,6 +20,10 @@ Unit tests for Spark ML Python APIs.
 """
 
 import sys
+try:
+    import xmlrunner
+except ImportError:
+    xmlrunner = None
 
 if sys.version_info[:2] <= (2, 6):
     try:
@@ -368,4 +372,7 @@ class CrossValidatorTests(PySparkTestCase):
 
 
 if __name__ == "__main__":
-    unittest.main()
+    if xmlrunner:
+        unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
+    else:
+        unittest.main()

http://git-wip-us.apache.org/repos/asf/spark/blob/163d53e8/python/pyspark/mllib/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/mllib/tests.py b/python/pyspark/mllib/tests.py
index 2ad69a0..f8e8e0e 100644
--- a/python/pyspark/mllib/tests.py
+++ b/python/pyspark/mllib/tests.py
@@ -31,6 +31,10 @@ from numpy import (
 from numpy import sum as array_sum
 
 from py4j.protocol import Py4JJavaError
+try:
+    import xmlrunner
+except ImportError:
+    xmlrunner = None
 
 if sys.version > '3':
     basestring = str
@@ -1538,7 +1542,10 @@ class MLUtilsTests(MLlibTestCase):
 if __name__ == "__main__":
     if not _have_scipy:
         print("NOTE: Skipping SciPy tests as it does not seem to be installed")
-    unittest.main()
+    if xmlrunner:
+        unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
+    else:
+        unittest.main()
     if not _have_scipy:
         print("NOTE: SciPy tests were skipped as it does not seem to be installed")
     sc.stop()

http://git-wip-us.apache.org/repos/asf/spark/blob/163d53e8/python/pyspark/sql/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index f465e1f..6356d4b 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -31,6 +31,10 @@ import time
 import datetime
 
 import py4j
+try:
+    import xmlrunner
+except ImportError:
+    xmlrunner = None
 
 if sys.version_info[:2] <= (2, 6):
     try:
@@ -1222,4 +1226,7 @@ class HiveContextSQLTests(ReusedPySparkTestCase):
 
 
 if __name__ == "__main__":
-    unittest.main()
+    if xmlrunner:
+        unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
+    else:
+        unittest.main()

http://git-wip-us.apache.org/repos/asf/spark/blob/163d53e8/python/pyspark/streaming/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/tests.py b/python/pyspark/streaming/tests.py
index 4963425..2c908da 100644
--- a/python/pyspark/streaming/tests.py
+++ b/python/pyspark/streaming/tests.py
@@ -27,6 +27,11 @@ import struct
 import shutil
 from functools import reduce
 
+try:
+    import xmlrunner
+except ImportError:
+    xmlrunner = None
+
 if sys.version_info[:2] <= (2, 6):
     try:
         import unittest2 as unittest
@@ -1303,4 +1308,8 @@ if __name__ == "__main__":
     for testcase in testcases:
         sys.stderr.write("[Running %s]\n" % (testcase))
         tests = unittest.TestLoader().loadTestsFromTestCase(testcase)
-        unittest.TextTestRunner(verbosity=3).run(tests)
+        if xmlrunner:
+            unittest.main(tests, verbosity=3,
+                          testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
+        else:
+            unittest.TextTestRunner(verbosity=3).run(tests)

http://git-wip-us.apache.org/repos/asf/spark/blob/163d53e8/python/pyspark/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index 3c51809..5bd9447 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -35,6 +35,10 @@ import threading
 import hashlib
 
 from py4j.protocol import Py4JJavaError
+try:
+    import xmlrunner
+except ImportError:
+    xmlrunner = None
 
 if sys.version_info[:2] <= (2, 6):
     try:
@@ -249,10 +253,12 @@ class SerializationTestCase(unittest.TestCase):
 
     # Regression test for SPARK-3415
     def test_pickling_file_handles(self):
-        ser = CloudPickleSerializer()
-        out1 = sys.stderr
-        out2 = ser.loads(ser.dumps(out1))
-        self.assertEqual(out1, out2)
+        # to be corrected with SPARK-11160
+        if not xmlrunner:
+            ser = CloudPickleSerializer()
+            out1 = sys.stderr
+            out2 = ser.loads(ser.dumps(out1))
+            self.assertEqual(out1, out2)
 
     def test_func_globals(self):
 
@@ -2006,7 +2012,10 @@ if __name__ == "__main__":
         print("NOTE: Skipping SciPy tests as it does not seem to be installed")
     if not _have_numpy:
         print("NOTE: Skipping NumPy tests as it does not seem to be installed")
-    unittest.main()
+    if xmlrunner:
+        unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports'))
+    else:
+        unittest.main()
     if not _have_scipy:
         print("NOTE: SciPy tests were skipped as it does not seem to be installed")
     if not _have_numpy:


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org