You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2018/09/20 16:41:48 UTC

spark git commit: [MINOR][PYTHON] Use a helper in `PythonUtils` instead of direct accessing Scala package

Repository: spark
Updated Branches:
  refs/heads/master 67f2c6a55 -> 88e7e87bd


[MINOR][PYTHON] Use a helper in `PythonUtils` instead of direct accessing Scala package

## What changes were proposed in this pull request?

This PR proposes to add a helper in `PythonUtils` instead of directly accessing the Scala package.

## How was this patch tested?

Jenkins tests.

Closes #22483 from HyukjinKwon/minor-refactoring.

Authored-by: hyukjinkwon <gu...@apache.org>
Signed-off-by: hyukjinkwon <gu...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/88e7e87b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/88e7e87b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/88e7e87b

Branch: refs/heads/master
Commit: 88e7e87bd5c052e10f52d4bb97a9d78f5b524128
Parents: 67f2c6a
Author: hyukjinkwon <gu...@apache.org>
Authored: Fri Sep 21 00:41:42 2018 +0800
Committer: hyukjinkwon <gu...@apache.org>
Committed: Fri Sep 21 00:41:42 2018 +0800

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/api/python/PythonUtils.scala | 4 ++++
 python/pyspark/context.py                                        | 4 +---
 2 files changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/88e7e87b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 27a5e19..cdce371 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -74,4 +74,8 @@ private[spark] object PythonUtils {
   def toScalaMap[K, V](jm: java.util.Map[K, V]): Map[K, V] = {
     jm.asScala.toMap
   }
+
+  def getEncryptionEnabled(sc: JavaSparkContext): Boolean = {
+    sc.conf.get(org.apache.spark.internal.config.IO_ENCRYPTION_ENABLED)
+  }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/88e7e87b/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 2c92c29..87255c4 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -192,9 +192,7 @@ class SparkContext(object):
         # If encryption is enabled, we need to setup a server in the jvm to read broadcast
         # data via a socket.
         # scala's mangled names w/ $ in them require special treatment.
-        encryption_conf = self._jvm.org.apache.spark.internal.config.__getattr__("package$")\
-            .__getattr__("MODULE$").IO_ENCRYPTION_ENABLED()
-        self._encryption_enabled = self._jsc.sc().conf().get(encryption_conf)
+        self._encryption_enabled = self._jvm.PythonUtils.getEncryptionEnabled(self._jsc)
 
         self.pythonExec = os.environ.get("PYSPARK_PYTHON", 'python')
         self.pythonVer = "%d.%d" % sys.version_info[:2]


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org