Posted to commits@systemml.apache.org by gw...@apache.org on 2017/08/07 21:13:41 UTC

systemml git commit: [MINOR] Ensure jar loaded for jvm_stdout

Repository: systemml
Updated Branches:
  refs/heads/master 70e5f29e8 -> 98a9d653d


[MINOR] Ensure jar loaded for jvm_stdout

Use _get_spark_context() instead of SparkContext._active_spark_context so that
the SystemML jar is guaranteed to be loaded before jvm_stdout accesses the JVM.

Closes #606.
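The fix relies on a load-once guard in mlcontext.py. Below is a minimal sketch
of that pattern; the _loadedSystemML flag and the _get_spark_context name come
from the hunk further down, while _ensure_systemml_loaded is a hypothetical
stand-in for whatever jar-loading call the real module performs.

from pyspark import SparkContext

_loadedSystemML = False

def _ensure_systemml_loaded(sc):
    # Hypothetical stand-in for the module's actual jar-loading step:
    # touching a SystemML class through the Py4J gateway forces the
    # SystemML jar to be resolved on first use.
    sc._jvm.org.apache.sysml.api.ml.Utils()

def _get_spark_context():
    # Return the active SparkContext, loading SystemML exactly once so
    # that later _jvm lookups (e.g. in jvm_stdout) cannot fail.
    global _loadedSystemML
    sc = SparkContext._active_spark_context
    if sc is None:
        raise Exception('Expected SparkContext to be initialized.')
    if not _loadedSystemML:
        _ensure_systemml_loaded(sc)
        _loadedSystemML = True
    return sc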


Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/98a9d653
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/98a9d653
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/98a9d653

Branch: refs/heads/master
Commit: 98a9d653de5aa9670b084d6480921fe98ce01861
Parents: 70e5f29
Author: Glenn Weidner <gw...@us.ibm.com>
Authored: Mon Aug 7 14:11:01 2017 -0700
Committer: Glenn Weidner <gw...@us.ibm.com>
Committed: Mon Aug 7 14:11:01 2017 -0700

----------------------------------------------------------------------
 src/main/python/systemml/mlcontext.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/98a9d653/src/main/python/systemml/mlcontext.py
----------------------------------------------------------------------
diff --git a/src/main/python/systemml/mlcontext.py b/src/main/python/systemml/mlcontext.py
index 4838f98..8ab683a 100644
--- a/src/main/python/systemml/mlcontext.py
+++ b/src/main/python/systemml/mlcontext.py
@@ -43,7 +43,8 @@ import threading, time
 _loadedSystemML = False
 def _get_spark_context():
     """
-    Internal method to get already initialized SparkContext.
+    Internal method to get the already initialized SparkContext.  Developers should always use
+    _get_spark_context() instead of SparkContext._active_spark_context to ensure SystemML is loaded.
 
     Returns
     -------
@@ -74,7 +75,7 @@ class jvm_stdout(object):
         Should flush the stdout in parallel
     """
     def __init__(self, parallel_flush=False):
-        self.util = SparkContext._active_spark_context._jvm.org.apache.sysml.api.ml.Utils()
+        self.util = _get_spark_context()._jvm.org.apache.sysml.api.ml.Utils()
         self.parallel_flush = parallel_flush
         self.t = threading.Thread(target=self.flush_stdout)
         self.stop = False
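
For illustration, typical use of jvm_stdout after this change might look like
the sketch below. The context-manager usage (__enter__/__exit__) and the
MLContext/dml imports are assumed from elsewhere in the systemml Python
package, outside this hunk.

from pyspark import SparkContext
from systemml import MLContext, dml
from systemml.mlcontext import jvm_stdout

sc = SparkContext.getOrCreate()   # existing Spark context
ml = MLContext(sc)

# jvm_stdout.__init__ now goes through _get_spark_context(), so the
# SystemML jar is loaded before org.apache.sysml.api.ml.Utils() is
# resolved, rather than failing when no SystemML class was touched yet.
with jvm_stdout(parallel_flush=True):
    ml.execute(dml('print("hello from the JVM")'))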