Posted to commits@spark.apache.org by ue...@apache.org on 2017/06/21 17:51:57 UTC
spark git commit: [SPARK-21125][PYTHON] Extend setJobDescription to PySpark and JavaSpark APIs
Repository: spark
Updated Branches:
refs/heads/master 7a00c658d -> ba78514da
[SPARK-21125][PYTHON] Extend setJobDescription to PySpark and JavaSpark APIs
## What changes were proposed in this pull request?
Extend setJobDescription to PySpark and JavaSpark APIs
SPARK-21125
## How was this patch tested?
Testing was done by running a local Spark shell and verifying the job description in the built UI. I originally added a unit test, but the PySpark context cannot easily access the Scala SparkContext's private job-description key, so given the simplicity of this addition I omitted the test.
Also ran the existing tests.
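For reference, a minimal sketch of this kind of manual check in a local PySpark shell; the app name, description string, and toy job are illustrative, not part of the patch:

# Illustrative manual check: with this patch applied, the description set
# below should appear next to the job on the Jobs tab of the Spark UI
# (http://localhost:4040 for a local shell).
from pyspark import SparkContext

sc = SparkContext("local[2]", "setJobDescription-check")
sc.setJobDescription("Count a small range")  # new PySpark API from this patch
sc.parallelize(range(100)).count()           # runs a job carrying the label
# Inspect the UI before stopping; it goes away once the context stops.
sc.stop()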
# Misc
This contribution is my original work, and I license the work to the project under the project's open source license.
Author: sjarvie <sj...@uber.com>
Closes #18332 from sjarvie/add_python_set_job_description.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ba78514d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ba78514d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ba78514d
Branch: refs/heads/master
Commit: ba78514da7bf2132873270b8bf39b50e54f4b094
Parents: 7a00c65
Author: sjarvie <sj...@uber.com>
Authored: Wed Jun 21 10:51:45 2017 -0700
Committer: Takuya UESHIN <ue...@databricks.com>
Committed: Wed Jun 21 10:51:45 2017 -0700
----------------------------------------------------------------------
.../scala/org/apache/spark/api/java/JavaSparkContext.scala | 6 ++++++
python/pyspark/context.py | 6 ++++++
2 files changed, 12 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/ba78514d/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 9481156..f1936bf 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -757,6 +757,12 @@ class JavaSparkContext(val sc: SparkContext)
    */
   def getLocalProperty(key: String): String = sc.getLocalProperty(key)
 
+  /**
+   * Set a human readable description of the current job.
+   * @since 2.3.0
+   */
+  def setJobDescription(value: String): Unit = sc.setJobDescription(value)
+
   /** Control our logLevel. This overrides any user-defined log settings.
    * @param logLevel The desired log level as a string.
    * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
http://git-wip-us.apache.org/repos/asf/spark/blob/ba78514d/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 3be0732..c4b7e63 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -942,6 +942,12 @@ class SparkContext(object):
"""
return self._jsc.getLocalProperty(key)
+ def setJobDescription(self, value):
+ """
+ Set a human readable description of the current job.
+ """
+ self._jsc.setJobDescription(value)
+
def sparkUser(self):
"""
Get SPARK_USER for user who is running SparkContext.
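As a usage note: the new PySpark method is a thin py4j wrapper that forwards to the JVM context, so the description behaves like its Scala counterpart, a per-thread label attached to jobs submitted from that thread. A short usage sketch follows, with illustrative group ids and description strings; setJobGroup is a pre-existing API that, to my understanding, also resets the description, so setJobDescription should be called after it:

from pyspark import SparkContext

sc = SparkContext("local[2]", "labelled-jobs-demo")

# Existing API: tag subsequent jobs with a group id and group description.
sc.setJobGroup("nightly-etl", "Nightly ETL run")

# New API from this patch: give each phase its own label in the UI.
sc.setJobDescription("Phase 1: row count")
sc.parallelize(range(1000)).count()

sc.setJobDescription("Phase 2: sum of squares")
sc.parallelize(range(1000)).map(lambda x: x * x).sum()

sc.stop()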