Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/03 20:25:04 UTC

[13/21] git commit: deprecate "spark" script and SPARK_CLASSPATH environment variable

deprecate "spark" script and SPAKR_CLASSPATH environment variable


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/52ccf4f8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/52ccf4f8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/52ccf4f8

Branch: refs/heads/master
Commit: 52ccf4f859d92ed9e86d3720a983ac2c4a1c23bf
Parents: cc37b31
Author: Andrew xia <ju...@intel.com>
Authored: Sat Oct 12 14:34:14 2013 +0800
Committer: Andrew xia <ju...@intel.com>
Committed: Sat Oct 12 14:34:14 2013 +0800

----------------------------------------------------------------------
 bin/spark                                       | 92 --------------------
 core/pom.xml                                    |  1 -
 .../scala/org/apache/spark/SparkContext.scala   |  2 +-
 repl-bin/src/deb/bin/run                        |  3 +-
 repl/pom.xml                                    |  1 -
 sbin/compute-classpath.cmd                      |  2 +-
 sbin/compute-classpath.sh                       |  2 +-
 7 files changed, 4 insertions(+), 99 deletions(-)
----------------------------------------------------------------------
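
In practical terms, this change retires the standalone "spark" launcher script (which refused to run unless SPARK_CLASSPATH was exported first) and stops seeding the computed classpath from SPARK_CLASSPATH in compute-classpath.sh / compute-classpath.cmd and in the Maven test configuration. The usage pattern being deprecated looked roughly like this (a simplified sketch based on the deleted script's usage message; the jar path and class name are only illustrative placeholders):

    export SPARK_CLASSPATH=/path/to/extra.jar          # previously required by bin/spark
    ./bin/spark org.apache.spark.deploy.master.Master  # old usage: spark <class> [<args>]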


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/bin/spark
----------------------------------------------------------------------
diff --git a/bin/spark b/bin/spark
deleted file mode 100755
index f5f7440..0000000
--- a/bin/spark
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-SCALA_VERSION=2.9.3
-
-# Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
-
-# Export this as SPARK_HOME
-export SPARK_HOME="$FWDIR"
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
-  . $FWDIR/conf/spark-env.sh
-fi
-
-if [ -z "$1" ]; then
-  echo "Usage: spark <class> [<args>]" >&2
-  echo "Usage: export SPARK_CLASSPATH before running the command" >&2
-  exit 1
-fi
-
-
-# Find the java binary
-if [ -n "${JAVA_HOME}" ]; then
-  RUNNER="${JAVA_HOME}/bin/java"
-else
-  if [ `command -v java` ]; then
-    RUNNER="java"
-  else
-    echo "JAVA_HOME is not set" >&2
-    exit 1
-  fi
-fi
-
-# Set SPARK_MEM if it isn't already set
-SPARK_MEM=${SPARK_MEM:-512m}
-export SPARK_MEM
-
-# Set APP_MEM if it isn't already set, we use this for this process as the app driver process may need 
-# as much memory as specified in SPARK_MEM
-APP_MEM=${APP_MEM:-512m}
-
-# Set JAVA_OPTS to be able to load native libraries and to set heap size
-JAVA_OPTS="$OUR_JAVA_OPTS"
-JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
-JAVA_OPTS="$JAVA_OPTS -Xms$APP_MEM -Xmx$APP_MEM"
-# Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
-fi
-export JAVA_OPTS
-# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
-
-if [ ! -f "$FWDIR/RELEASE" ]; then
-  # Exit if the user hasn't compiled Spark
-  ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
-  if [[ $? != 0 ]]; then
-    echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
-    echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
-    exit 1
-  fi
-fi
-
-# Compute classpath using external script
-CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
-export CLASSPATH
-
-if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
-  echo -n "Spark Command: "
-  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
-  echo "========================================"
-  echo
-fi
-
-exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 9c2d604..8359fef 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -226,7 +226,6 @@
           <environmentVariables>
             <SPARK_HOME>${basedir}/..</SPARK_HOME>
             <SPARK_TESTING>1</SPARK_TESTING>
-            <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
           </environmentVariables>
         </configuration>
       </plugin>

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 912ce75..ce7c4fe 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -132,7 +132,7 @@ class SparkContext(
   // Environment variables to pass to our executors
   private[spark] val executorEnvs = HashMap[String, String]()
   // Note: SPARK_MEM is included for Mesos, but overwritten for standalone mode in ExecutorRunner
-  for (key <- Seq("SPARK_CLASSPATH", "SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS", "SPARK_TESTING")) {
+  for (key <- Seq("SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS", "SPARK_TESTING")) {
     val value = System.getenv(key)
     if (value != null) {
       executorEnvs(key) = value

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/repl-bin/src/deb/bin/run
----------------------------------------------------------------------
diff --git a/repl-bin/src/deb/bin/run b/repl-bin/src/deb/bin/run
index 8b5d830..d34f189 100755
--- a/repl-bin/src/deb/bin/run
+++ b/repl-bin/src/deb/bin/run
@@ -48,8 +48,7 @@ fi
 export JAVA_OPTS
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH"
-CLASSPATH+=":$FWDIR/conf"
+CLASSPATH=":$FWDIR/conf"
 for jar in `find $FWDIR -name '*jar'`; do
   CLASSPATH+=":$jar"
 done
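
One small side effect of the repl-bin change above: dropping CLASSPATH="$SPARK_CLASSPATH" while keeping the "+=" style leaves a leading ':' in the new assignment, i.e. an empty classpath entry, which the JVM treats as the current directory. A cleaner equivalent (a suggested tweak, not part of this commit) would be:

    CLASSPATH="$FWDIR/conf"
    for jar in `find $FWDIR -name '*jar'`; do
      CLASSPATH+=":$jar"
    done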

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/repl/pom.xml
----------------------------------------------------------------------
diff --git a/repl/pom.xml b/repl/pom.xml
index 2826c07..f71184f 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -125,7 +125,6 @@
           <environmentVariables>
             <SPARK_HOME>${basedir}/..</SPARK_HOME>
             <SPARK_TESTING>1</SPARK_TESTING>
-            <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
           </environmentVariables>
         </configuration>
       </plugin>

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/sbin/compute-classpath.cmd
----------------------------------------------------------------------
diff --git a/sbin/compute-classpath.cmd b/sbin/compute-classpath.cmd
index cf38188..e0b8a8e 100644
--- a/sbin/compute-classpath.cmd
+++ b/sbin/compute-classpath.cmd
@@ -29,7 +29,7 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
 if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
 
 rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf
+set CLASSPATH=%FWDIR%conf
 if exist "%FWDIR%RELEASE" (
   for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
     set ASSEMBLY_JAR=%%d

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/52ccf4f8/sbin/compute-classpath.sh
----------------------------------------------------------------------
diff --git a/sbin/compute-classpath.sh b/sbin/compute-classpath.sh
index d9217ec..cfe5fe7 100755
--- a/sbin/compute-classpath.sh
+++ b/sbin/compute-classpath.sh
@@ -31,7 +31,7 @@ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
 fi
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
+CLASSPATH="$FWDIR/conf"
 if [ -f "$FWDIR/RELEASE" ]; then
   ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
 else