You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by fe...@apache.org on 2016/02/19 01:42:35 UTC
incubator-zeppelin git commit: Travis - spark script should stop on
error
Repository: incubator-zeppelin
Updated Branches:
refs/heads/master 54f728943 -> d74f98ab3
Travis - spark script should stop on error
### What is this PR for?
There have been a few cases where Travis fails to download the Spark release but doesn't stop. Stopping would make it easier to track down the failure and check the exit code.
### What type of PR is it?
Improvement
### Todos
* [x] - Update spark download & start/stop script
### Is there a relevant Jira issue?
N/A
### How should this be tested?
Run Travis CI
### Screenshots (if appropriate)
N/A
### Questions:
* Do the license files need an update? No
* Is there breaking changes for older versions? No
* Does this need documentation? No
Author: Felix Cheung <fe...@hotmail.com>
Closes #710 from felixcheung/checkdownload and squashes the following commits:
d89cde9 [Felix Cheung] fix version check
3c64db8 [Felix Cheung] spark script stop on error
Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/d74f98ab
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/d74f98ab
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/d74f98ab
Branch: refs/heads/master
Commit: d74f98ab35ac9475eb578ce7bed04dc4fb471236
Parents: 54f7289
Author: Felix Cheung <fe...@hotmail.com>
Authored: Sat Feb 13 20:58:49 2016 -0800
Committer: Felix Cheung <fe...@apache.org>
Committed: Thu Feb 18 16:42:32 2016 -0800
----------------------------------------------------------------------
testing/startSparkCluster.sh | 26 ++++++++++++++++++++------
testing/stopSparkCluster.sh | 4 +++-
2 files changed, 23 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/d74f98ab/testing/startSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 8b7ad36..ded1204 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -26,13 +26,26 @@ fi
SPARK_VERSION="${1}"
HADOOP_VERSION="${2}"
+echo ${SPARK_VERSION} | grep "^1.[123].[0-9]" > /dev/null
+if [ $? -eq 0 ]; then
+ echo "${SPARK_VERSION}" | grep "^1.[12].[0-9]" > /dev/null
+ if [ $? -eq 0 ]; then
+ SPARK_VER_RANGE="<=1.2"
+ else
+ SPARK_VER_RANGE="<=1.3"
+ fi
+else
+ SPARK_VER_RANGE=">1.3"
+fi
+
+set -xe
+
FWDIR=$(dirname "${BASH_SOURCE-$0}")
ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
-echo "SPARK_HOME is ${SPARK_HOME} "
+echo "SPARK_HOME is ${SPARK_HOME}"
if [ ! -d "${SPARK_HOME}" ]; then
- echo "${SPARK_VERSION}" | grep "^1.[12].[0-9]" > /dev/null
- if [ $? -eq 0 ]; then
+ if [ "${SPARK_VER_RANGE}" == "<=1.2" ]; then
# spark 1.1.x and spark 1.2.x can be downloaded from archive
wget -q http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
else
@@ -51,16 +64,17 @@ fi
mkdir -p ${SPARK_HOME}/run
export SPARK_PID_DIR=${SPARK_HOME}/run
-
# start
export SPARK_MASTER_PORT=7071
export SPARK_MASTER_WEBUI_PORT=7072
export SPARK_WORKER_WEBUI_PORT=8082
${SPARK_HOME}/sbin/start-master.sh
-echo ${SPARK_VERSION} | grep "^1.[123].[0-9]" > /dev/null
-if [ $? -eq 0 ]; then # spark 1.3 or prior
+if [ "${SPARK_VER_RANGE}" == "<=1.3" ]||[ "${SPARK_VER_RANGE}" == "<=1.2" ]; then
+ # spark 1.3 or prior
${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}
else
${SPARK_HOME}/sbin/start-slave.sh spark://`hostname`:7071
fi
+
+set +xe
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/d74f98ab/testing/stopSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/stopSparkCluster.sh b/testing/stopSparkCluster.sh
index 0ae312d..1bf8eac 100755
--- a/testing/stopSparkCluster.sh
+++ b/testing/stopSparkCluster.sh
@@ -25,6 +25,8 @@ fi
SPARK_VERSION="${1}"
HADOOP_VERSION="${2}"
+set -xe
+
FWDIR=$(dirname "${BASH_SOURCE-$0}")
ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
@@ -32,6 +34,6 @@ export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VER
# set create PID dir
export SPARK_PID_DIR=${SPARK_HOME}/run
-
${SPARK_HOME}/sbin/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
${SPARK_HOME}/sbin/stop-master.sh
+set +xe