You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2016/01/26 23:58:42 UTC

spark git commit: [SPARK-12993][PYSPARK] Remove usage of ADD_FILES in pyspark

Repository: spark
Updated Branches:
  refs/heads/master 83507fea9 -> 19fdb21af


[SPARK-12993][PYSPARK] Remove usage of ADD_FILES in pyspark

The environment variable ADD_FILES was created for adding Python files to the Spark context so they could be distributed to executors (SPARK-865); this is deprecated now. Users are encouraged to use --py-files for adding Python files.

Author: Jeff Zhang <zj...@apache.org>

Closes #10913 from zjffdu/SPARK-12993.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/19fdb21a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/19fdb21a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/19fdb21a

Branch: refs/heads/master
Commit: 19fdb21afbf0eae4483cf6d4ef32daffd1994b89
Parents: 83507fe
Author: Jeff Zhang <zj...@apache.org>
Authored: Tue Jan 26 14:58:39 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Tue Jan 26 14:58:39 2016 -0800

----------------------------------------------------------------------
 python/pyspark/shell.py | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/19fdb21a/python/pyspark/shell.py
----------------------------------------------------------------------
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 26cafca..7c37f75 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -32,15 +32,10 @@ from pyspark.context import SparkContext
 from pyspark.sql import SQLContext, HiveContext
 from pyspark.storagelevel import StorageLevel
 
-# this is the deprecated equivalent of ADD_JARS
-add_files = None
-if os.environ.get("ADD_FILES") is not None:
-    add_files = os.environ.get("ADD_FILES").split(',')
-
 if os.environ.get("SPARK_EXECUTOR_URI"):
     SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
 
-sc = SparkContext(pyFiles=add_files)
+sc = SparkContext()
 atexit.register(lambda: sc.stop())
 
 try:
@@ -68,10 +63,6 @@ print("Using Python version %s (%s, %s)" % (
     platform.python_build()[1]))
 print("SparkContext available as sc, %s available as sqlContext." % sqlContext.__class__.__name__)
 
-if add_files is not None:
-    print("Warning: ADD_FILES environment variable is deprecated, use --py-files argument instead")
-    print("Adding files: [%s]" % ", ".join(add_files))
-
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org