Posted to commits@spark.apache.org by sr...@apache.org on 2017/04/02 14:31:17 UTC

spark git commit: [SPARK-20123][BUILD] SPARK_HOME variable might have spaces in it (e.g. $SPARK…

Repository: spark
Updated Branches:
  refs/heads/master d40cbb861 -> 76de2d115


[SPARK-20123][BUILD] SPARK_HOME variable might have spaces in it (e.g. $SPARK…

JIRA Issue: https://issues.apache.org/jira/browse/SPARK-20123

## What changes were proposed in this pull request?

If the $SPARK_HOME or $FWDIR variable contains spaces, then building Spark with "./dev/make-distribution.sh --name custom-spark --tgz -Psparkr -Phadoop-2.7 -Phive -Phive-thriftserver -Pmesos -Pyarn" fails. This change quotes the affected variable expansions in the build scripts so that paths containing spaces are handled correctly.
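
For illustration, a minimal sketch of the failure mode (the path below is hypothetical):

    # Unquoted expansion: the shell word-splits the path at the space,
    # so pushd receives two arguments instead of one directory.
    FWDIR="/opt/my spark/R"
    pushd $FWDIR        # error: pushd: too many arguments
    pushd "$FWDIR"      # quoted: a single word, works as intended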

## How was this patch tested?

Manual tests.

Author: zuotingbing <zu...@zte.com.cn>

Closes #17452 from zuotingbing/spark-bulid.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/76de2d11
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/76de2d11
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/76de2d11

Branch: refs/heads/master
Commit: 76de2d115364aa6a1fdaacdfae05f0c695c953b8
Parents: d40cbb8
Author: zuotingbing <zu...@zte.com.cn>
Authored: Sun Apr 2 15:31:13 2017 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Sun Apr 2 15:31:13 2017 +0100

----------------------------------------------------------------------
 R/check-cran.sh             | 20 ++++++++++----------
 R/create-docs.sh            | 10 +++++-----
 R/create-rd.sh              |  8 ++++----
 R/install-dev.sh            | 14 +++++++-------
 R/install-source-package.sh | 20 ++++++++++----------
 dev/make-distribution.sh    | 32 ++++++++++++++++----------------
 6 files changed, 52 insertions(+), 52 deletions(-)
----------------------------------------------------------------------
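
The recurring fix across all six scripts is the same: quote every variable expansion, including the command substitution that resolves the script's own directory. A sketch of the pattern, taken from the hunks below:

    # Resolve this script's directory even when the path contains spaces;
    # the backtick substitution is quoted as well as the outer result.
    FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
    pushd "$FWDIR" > /dev/null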


http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/R/check-cran.sh
----------------------------------------------------------------------
diff --git a/R/check-cran.sh b/R/check-cran.sh
index a188b14..22cc9c6 100755
--- a/R/check-cran.sh
+++ b/R/check-cran.sh
@@ -20,18 +20,18 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
 
-. $FWDIR/find-r.sh
+. "$FWDIR/find-r.sh"
 
 # Install the package (this is required for code in vignettes to run when building it later)
 # Build the latest docs, but not vignettes, which is built with the package next
-. $FWDIR/install-dev.sh
+. "$FWDIR/install-dev.sh"
 
 # Build source package with vignettes
 SPARK_HOME="$(cd "${FWDIR}"/..; pwd)"
-. "${SPARK_HOME}"/bin/load-spark-env.sh
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 if [ -f "${SPARK_HOME}/RELEASE" ]; then
   SPARK_JARS_DIR="${SPARK_HOME}/jars"
 else
@@ -40,16 +40,16 @@ fi
 
 if [ -d "$SPARK_JARS_DIR" ]; then
   # Build a zip file containing the source package with vignettes
-  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/"R CMD build $FWDIR/pkg
+  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD build "$FWDIR/pkg"
 
   find pkg/vignettes/. -not -name '.' -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
 else
-  echo "Error Spark JARs not found in $SPARK_HOME"
+  echo "Error Spark JARs not found in '$SPARK_HOME'"
   exit 1
 fi
 
 # Run check as-cran.
-VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
+VERSION=`grep Version "$FWDIR/pkg/DESCRIPTION" | awk '{print $NF}'`
 
 CRAN_CHECK_OPTIONS="--as-cran"
 
@@ -67,10 +67,10 @@ echo "Running CRAN check with $CRAN_CHECK_OPTIONS options"
 
 if [ -n "$NO_TESTS" ] && [ -n "$NO_MANUAL" ]
 then
-  "$R_SCRIPT_PATH/"R CMD check $CRAN_CHECK_OPTIONS SparkR_"$VERSION".tar.gz
+  "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
 else
   # This will run tests and/or build vignettes, and require SPARK_HOME
-  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/"R CMD check $CRAN_CHECK_OPTIONS SparkR_"$VERSION".tar.gz
+  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
 fi
 
 popd > /dev/null
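
Note that $CRAN_CHECK_OPTIONS is deliberately left unquoted above: it holds multiple flags, and word-splitting is what passes them to R as separate arguments. A minimal sketch (the second flag is hypothetical):

    CRAN_CHECK_OPTIONS="--as-cran --no-manual"
    # Unquoted: splits into two flags, as intended.
    R CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
    # Quoted: would pass the single, invalid flag "--as-cran --no-manual".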

http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/R/create-docs.sh
----------------------------------------------------------------------
diff --git a/R/create-docs.sh b/R/create-docs.sh
index 6bef7e7..310dbc5 100755
--- a/R/create-docs.sh
+++ b/R/create-docs.sh
@@ -33,15 +33,15 @@ export FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 export SPARK_HOME="$(cd "`dirname "${BASH_SOURCE[0]}"`"/..; pwd)"
 
 # Required for setting SPARK_SCALA_VERSION
-. "${SPARK_HOME}"/bin/load-spark-env.sh
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 echo "Using Scala $SPARK_SCALA_VERSION"
 
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 # Install the package (this will also generate the Rd files)
-. $FWDIR/install-dev.sh
+. "$FWDIR/install-dev.sh"
 
 # Now create HTML files
 
@@ -49,7 +49,7 @@ pushd $FWDIR > /dev/null
 mkdir -p pkg/html
 pushd pkg/html
 
-"$R_SCRIPT_PATH/"Rscript -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
+"$R_SCRIPT_PATH/Rscript" -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
 
 popd
 

http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/R/create-rd.sh
----------------------------------------------------------------------
diff --git a/R/create-rd.sh b/R/create-rd.sh
index d17e161..ff622a4 100755
--- a/R/create-rd.sh
+++ b/R/create-rd.sh
@@ -29,9 +29,9 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 # Generate Rd files if devtools is installed
-"$R_SCRIPT_PATH/"Rscript -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'
+"$R_SCRIPT_PATH/Rscript" -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'

http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/R/install-dev.sh
----------------------------------------------------------------------
diff --git a/R/install-dev.sh b/R/install-dev.sh
index 45e6411..d613552 100755
--- a/R/install-dev.sh
+++ b/R/install-dev.sh
@@ -29,21 +29,21 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 LIB_DIR="$FWDIR/lib"
 
-mkdir -p $LIB_DIR
+mkdir -p "$LIB_DIR"
 
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
-. $FWDIR/create-rd.sh
+. "$FWDIR/create-rd.sh"
 
 # Install SparkR to $LIB_DIR
-"$R_SCRIPT_PATH/"R CMD INSTALL --library=$LIB_DIR $FWDIR/pkg/
+"$R_SCRIPT_PATH/R" CMD INSTALL --library="$LIB_DIR" "$FWDIR/pkg/"
 
 # Zip the SparkR package so that it can be distributed to worker nodes on YARN
-cd $LIB_DIR
+cd "$LIB_DIR"
 jar cfM "$LIB_DIR/sparkr.zip" SparkR
 
 popd > /dev/null

http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/R/install-source-package.sh
----------------------------------------------------------------------
diff --git a/R/install-source-package.sh b/R/install-source-package.sh
index c6e443c..8de3569 100755
--- a/R/install-source-package.sh
+++ b/R/install-source-package.sh
@@ -29,28 +29,28 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 if [ -z "$VERSION" ]; then
-  VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
+  VERSION=`grep Version "$FWDIR/pkg/DESCRIPTION" | awk '{print $NF}'`
 fi
 
-if [ ! -f "$FWDIR"/SparkR_"$VERSION".tar.gz ]; then
-  echo -e "R source package file $FWDIR/SparkR_$VERSION.tar.gz is not found."
+if [ ! -f "$FWDIR/SparkR_$VERSION.tar.gz" ]; then
+  echo -e "R source package file '$FWDIR/SparkR_$VERSION.tar.gz' is not found."
   echo -e "Please build R source package with check-cran.sh"
   exit -1;
 fi
 
 echo "Removing lib path and installing from source package"
 LIB_DIR="$FWDIR/lib"
-rm -rf $LIB_DIR
-mkdir -p $LIB_DIR
-"$R_SCRIPT_PATH/"R CMD INSTALL SparkR_"$VERSION".tar.gz --library=$LIB_DIR
+rm -rf "$LIB_DIR"
+mkdir -p "$LIB_DIR"
+"$R_SCRIPT_PATH/R" CMD INSTALL "SparkR_$VERSION.tar.gz" --library="$LIB_DIR"
 
 # Zip the SparkR package so that it can be distributed to worker nodes on YARN
-pushd $LIB_DIR > /dev/null
+pushd "$LIB_DIR" > /dev/null
 jar cfM "$LIB_DIR/sparkr.zip" SparkR
 popd > /dev/null
 

http://git-wip-us.apache.org/repos/asf/spark/blob/76de2d11/dev/make-distribution.sh
----------------------------------------------------------------------
diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh
index 769cbda..48a8244 100755
--- a/dev/make-distribution.sh
+++ b/dev/make-distribution.sh
@@ -140,7 +140,7 @@ echo "Spark version is $VERSION"
 if [ "$MAKE_TGZ" == "true" ]; then
   echo "Making spark-$VERSION-bin-$NAME.tgz"
 else
-  echo "Making distribution for Spark $VERSION in $DISTDIR..."
+  echo "Making distribution for Spark $VERSION in '$DISTDIR'..."
 fi
 
 # Build uber fat JAR
@@ -170,7 +170,7 @@ cp "$SPARK_HOME"/assembly/target/scala*/jars/* "$DISTDIR/jars/"
 
 # Only create the yarn directory if the yarn artifacts were build.
 if [ -f "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar ]; then
-  mkdir "$DISTDIR"/yarn
+  mkdir "$DISTDIR/yarn"
   cp "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/yarn"
 fi
 
@@ -179,7 +179,7 @@ mkdir -p "$DISTDIR/examples/jars"
 cp "$SPARK_HOME"/examples/target/scala*/jars/* "$DISTDIR/examples/jars"
 
 # Deduplicate jars that have already been packaged as part of the main Spark dependencies.
-for f in "$DISTDIR/examples/jars/"*; do
+for f in "$DISTDIR"/examples/jars/*; do
   name=$(basename "$f")
   if [ -f "$DISTDIR/jars/$name" ]; then
     rm "$DISTDIR/examples/jars/$name"
@@ -188,14 +188,14 @@ done
 
 # Copy example sources (needed for python and SQL)
 mkdir -p "$DISTDIR/examples/src/main"
-cp -r "$SPARK_HOME"/examples/src/main "$DISTDIR/examples/src/"
+cp -r "$SPARK_HOME/examples/src/main" "$DISTDIR/examples/src/"
 
 # Copy license and ASF files
 cp "$SPARK_HOME/LICENSE" "$DISTDIR"
 cp -r "$SPARK_HOME/licenses" "$DISTDIR"
 cp "$SPARK_HOME/NOTICE" "$DISTDIR"
 
-if [ -e "$SPARK_HOME"/CHANGES.txt ]; then
+if [ -e "$SPARK_HOME/CHANGES.txt" ]; then
   cp "$SPARK_HOME/CHANGES.txt" "$DISTDIR"
 fi
 
@@ -217,43 +217,43 @@ fi
 # Make R package - this is used for both CRAN release and packing R layout into distribution
 if [ "$MAKE_R" == "true" ]; then
   echo "Building R source package"
-  R_PACKAGE_VERSION=`grep Version $SPARK_HOME/R/pkg/DESCRIPTION | awk '{print $NF}'`
+  R_PACKAGE_VERSION=`grep Version "$SPARK_HOME/R/pkg/DESCRIPTION" | awk '{print $NF}'`
   pushd "$SPARK_HOME/R" > /dev/null
   # Build source package and run full checks
   # Do not source the check-cran.sh - it should be run from where it is for it to set SPARK_HOME
-  NO_TESTS=1 "$SPARK_HOME/"R/check-cran.sh
+  NO_TESTS=1 "$SPARK_HOME/R/check-cran.sh"
 
   # Move R source package to match the Spark release version if the versions are not the same.
   # NOTE(shivaram): `mv` throws an error on Linux if source and destination are same file
   if [ "$R_PACKAGE_VERSION" != "$VERSION" ]; then
-    mv $SPARK_HOME/R/SparkR_"$R_PACKAGE_VERSION".tar.gz $SPARK_HOME/R/SparkR_"$VERSION".tar.gz
+    mv "$SPARK_HOME/R/SparkR_$R_PACKAGE_VERSION.tar.gz" "$SPARK_HOME/R/SparkR_$VERSION.tar.gz"
   fi
 
   # Install source package to get it to generate vignettes rds files, etc.
-  VERSION=$VERSION "$SPARK_HOME/"R/install-source-package.sh
+  VERSION=$VERSION "$SPARK_HOME/R/install-source-package.sh"
   popd > /dev/null
 else
   echo "Skipping building R source package"
 fi
 
 # Copy other things
-mkdir "$DISTDIR"/conf
-cp "$SPARK_HOME"/conf/*.template "$DISTDIR"/conf
+mkdir "$DISTDIR/conf"
+cp "$SPARK_HOME"/conf/*.template "$DISTDIR/conf"
 cp "$SPARK_HOME/README.md" "$DISTDIR"
 cp -r "$SPARK_HOME/bin" "$DISTDIR"
 cp -r "$SPARK_HOME/python" "$DISTDIR"
 
 # Remove the python distribution from dist/ if we built it
 if [ "$MAKE_PIP" == "true" ]; then
-  rm -f $DISTDIR/python/dist/pyspark-*.tar.gz
+  rm -f "$DISTDIR"/python/dist/pyspark-*.tar.gz
 fi
 
 cp -r "$SPARK_HOME/sbin" "$DISTDIR"
 # Copy SparkR if it exists
-if [ -d "$SPARK_HOME"/R/lib/SparkR ]; then
-  mkdir -p "$DISTDIR"/R/lib
-  cp -r "$SPARK_HOME/R/lib/SparkR" "$DISTDIR"/R/lib
-  cp "$SPARK_HOME/R/lib/sparkr.zip" "$DISTDIR"/R/lib
+if [ -d "$SPARK_HOME/R/lib/SparkR" ]; then
+  mkdir -p "$DISTDIR/R/lib"
+  cp -r "$SPARK_HOME/R/lib/SparkR" "$DISTDIR/R/lib"
+  cp "$SPARK_HOME/R/lib/sparkr.zip" "$DISTDIR/R/lib"
 fi
 
 if [ "$MAKE_TGZ" == "true" ]; then


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org