Posted to commits@spark.apache.org by ma...@apache.org on 2014/07/30 04:02:11 UTC

git commit: [SPARK-2305] [PySpark] Update Py4J to version 0.8.2.1

Repository: spark
Updated Branches:
  refs/heads/master 86534d0f5 -> 22649b6cd


[SPARK-2305] [PySpark] Update Py4J to version 0.8.2.1

Author: Josh Rosen <jo...@apache.org>

Closes #1626 from JoshRosen/SPARK-2305 and squashes the following commits:

03fb283 [Josh Rosen] Update Py4J to version 0.8.2.1.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/22649b6c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/22649b6c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/22649b6c

Branch: refs/heads/master
Commit: 22649b6cde8e18f043f122bce46f446174d00f6c
Parents: 86534d0
Author: Josh Rosen <jo...@apache.org>
Authored: Tue Jul 29 19:02:06 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Tue Jul 29 19:02:06 2014 -0700

----------------------------------------------------------------------
 LICENSE                                            |   4 ++--
 bin/pyspark                                        |   2 +-
 bin/pyspark2.cmd                                   |   2 +-
 core/pom.xml                                       |   2 +-
 .../org/apache/spark/api/python/PythonUtils.scala  |   2 +-
 python/lib/py4j-0.8.1-src.zip                      | Bin 37662 -> 0 bytes
 python/lib/py4j-0.8.2.1-src.zip                    | Bin 0 -> 37562 bytes
 sbin/spark-config.sh                               |   2 +-
 sbin/spark-executor                                |   2 +-
 9 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
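
As a quick sanity check after this upgrade (illustrative only, not part of the diff below), the new bundle can be verified from a PySpark shell by confirming where the py4j package is loaded from:

    # Illustrative check, assuming a Spark checkout with this change applied
    # and the bin/pyspark launcher, which puts the bundled zip on PYTHONPATH.
    import py4j
    print(py4j.__file__)  # expected to point inside python/lib/py4j-0.8.2.1-src.zip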


http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/LICENSE
----------------------------------------------------------------------
diff --git a/LICENSE b/LICENSE
index 65e1f48..76a3601 100644
--- a/LICENSE
+++ b/LICENSE
@@ -272,7 +272,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 
 ========================================================================
-For Py4J (python/lib/py4j0.7.egg and files in assembly/lib/net/sf/py4j):
+For Py4J (python/lib/py4j-0.8.2.1-src.zip)
 ========================================================================
 
 Copyright (c) 2009-2011, Barthelemy Dagenais All rights reserved.
@@ -532,7 +532,7 @@ The following components are provided under a BSD-style license. See project lin
      (New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
      (The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
      (The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
-     (The New BSD License) Py4J (net.sf.py4j:py4j:0.8.1 - http://py4j.sourceforge.net/)
+     (The New BSD License) Py4J (net.sf.py4j:py4j:0.8.2.1 - http://py4j.sourceforge.net/)
      (Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
      (ISC/BSD License) jbcrypt (org.mindrot:jbcrypt:0.3m - http://www.mindrot.org/)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/bin/pyspark
----------------------------------------------------------------------
diff --git a/bin/pyspark b/bin/pyspark
index 69b056f..39a20e2 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -52,7 +52,7 @@ export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
 export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP=$PYTHONSTARTUP

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/bin/pyspark2.cmd
----------------------------------------------------------------------
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 0ef9eea..2c4b08a 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -45,7 +45,7 @@ rem Figure out which Python to use.
 if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
 
 set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
-set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.1-src.zip;%PYTHONPATH%
+set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
 
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index a247434..4f06109 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -275,7 +275,7 @@
     <dependency>
       <groupId>net.sf.py4j</groupId>
       <artifactId>py4j</artifactId>
-      <version>0.8.1</version>
+      <version>0.8.2.1</version>
     </dependency>
   </dependencies>
   <build>

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 6d3e257..52c7071 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -29,7 +29,7 @@ private[spark] object PythonUtils {
     val pythonPath = new ArrayBuffer[String]
     for (sparkHome <- sys.env.get("SPARK_HOME")) {
       pythonPath += Seq(sparkHome, "python").mkString(File.separator)
-      pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.1-src.zip").mkString(File.separator)
+      pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.2.1-src.zip").mkString(File.separator)
     }
     pythonPath ++= SparkContext.jarOfObject(this)
     pythonPath.mkString(File.pathSeparator)
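
The Scala helper above builds the path entries (the python/ directory plus the bundled Py4J source zip) that Spark joins into a PYTHONPATH for Python processes. A minimal sketch of the equivalent setup done by hand from plain Python, assuming SPARK_HOME points at a checkout containing this change:

    # Sketch only: mirrors the path construction shown above; SPARK_HOME is an
    # assumed environment variable pointing at the Spark source tree.
    import os
    import sys

    spark_home = os.environ["SPARK_HOME"]
    sys.path.insert(0, os.path.join(spark_home, "python"))
    sys.path.insert(0, os.path.join(spark_home, "python", "lib", "py4j-0.8.2.1-src.zip"))

    import py4j  # now resolves from the 0.8.2.1 source zip via zipimport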

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/python/lib/py4j-0.8.1-src.zip
----------------------------------------------------------------------
diff --git a/python/lib/py4j-0.8.1-src.zip b/python/lib/py4j-0.8.1-src.zip
deleted file mode 100644
index 2069a32..0000000
Binary files a/python/lib/py4j-0.8.1-src.zip and /dev/null differ

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/python/lib/py4j-0.8.2.1-src.zip
----------------------------------------------------------------------
diff --git a/python/lib/py4j-0.8.2.1-src.zip b/python/lib/py4j-0.8.2.1-src.zip
new file mode 100644
index 0000000..5203b84
Binary files /dev/null and b/python/lib/py4j-0.8.2.1-src.zip differ

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/sbin/spark-config.sh
----------------------------------------------------------------------
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index 147b506..5c87da5 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -36,4 +36,4 @@ export SPARK_HOME=${SPARK_PREFIX}
 export SPARK_CONF_DIR="$SPARK_HOME/conf"
 # Add the PySpark classes to the PYTHONPATH:
 export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH

http://git-wip-us.apache.org/repos/asf/spark/blob/22649b6c/sbin/spark-executor
----------------------------------------------------------------------
diff --git a/sbin/spark-executor b/sbin/spark-executor
index 336549f..3621321 100755
--- a/sbin/spark-executor
+++ b/sbin/spark-executor
@@ -20,7 +20,7 @@
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
 export PYTHONPATH=$FWDIR/python:$PYTHONPATH
-export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
 
 echo "Running spark-executor with framework dir = $FWDIR"
 exec $FWDIR/bin/spark-class org.apache.spark.executor.MesosExecutorBackend