You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by pd...@apache.org on 2020/09/07 08:36:17 UTC
[zeppelin] branch master updated: [ZEPPELIN-5024] Improve Travis-CI
This is an automated email from the ASF dual-hosted git repository.
pdallig pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push:
new 3b994f3 [ZEPPELIN-5024] Improve Travis-CI
3b994f3 is described below
commit 3b994f35e039f741afd5fe5345d2974586ece81a
Author: Philipp Dallig <ph...@gmail.com>
AuthorDate: Tue Sep 1 14:21:26 2020 +0200
[ZEPPELIN-5024] Improve Travis-CI
### What is this PR for?
Travis-CI:
- remove all environment variables from the build matrix, to use the same cache for all jobs
- activate the mysql service only when this service is needed
- activate the xvfb service when it is necessary and possible
- removed Bower Caching to remove too many complicated lines in Travis-ci
- Giving the test names
- Installing R only once with conda, previously it was installed twice with 'testing/install_R.sh' and 'testing/install_external_dependencies.sh'
- remove the R-Cache, because the installation with conda is quite fast
- Delete 'testing/install_R.sh' because it is no longer used
Other:
- Ignore the tests in 'HeliumApplicationFactoryTest.java' to get the JUnit tests running in the IDE + remove exclude in '.travis.yml'
- Remove deprecation warning in maven-surefire-plugin
- Helium works better with an absolute path, because a relative path in PATH is not a good idea for local testing
- Remove JVM language dependent asserts
### What type of PR is it?
Improvement
### Todos
* [ ] - Task
### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-5024
### How should this be tested?
* Travis-CI: https://travis-ci.org/github/Reamer/zeppelin/builds/723116251
### Questions:
* Do the license files need updating? No
* Are there breaking changes for older versions? No
* Does this need documentation? No
Author: Philipp Dallig <ph...@gmail.com>
Closes #3897 from Reamer/test_refactoring and squashes the following commits:
5f0dc196d [Philipp Dallig] Remove zeppelin artifacts from m2 cache
e44957dee [Philipp Dallig] Remove old install_R.sh
cb0ee7ff2 [Philipp Dallig] Install R with conda, because then we need no sudo rights
b57906b43 [Philipp Dallig] Remove Spark distribution from cache, because the version differs between tests
b2995a181 [Philipp Dallig] Give specific jobs a unique cache
837ff9d7f [Philipp Dallig] Remove env in each job to use the same cache
4153765fb [Philipp Dallig] Remove bower_components cache
7ae02b19d [Philipp Dallig] Only run mysql, if we need mysql for integration tests
d70e2011a [Philipp Dallig] Use service for xvfb - display tests
e0a6ba3db [Philipp Dallig] Naming build jobs
ab1694e56 [Philipp Dallig] forkMode is deprecated, migrate to new properties
f0fe75167 [Philipp Dallig] The String "command not found" and "No such file or directory" depends on your JVM language.
c0b807104 [Philipp Dallig] Disable HeliumApplicationFactoryTest in SourceCode
2b13a0e99 [Philipp Dallig] The maven-frontend-plugin works better with an absolute path. A relative path in the PATH environment variable can be problematic.
---
.travis.yml | 498 ++++++++++++++++-----
flink/interpreter/pom.xml | 1 -
pig/pom.xml | 3 +-
r/pom.xml | 3 +-
testing/install_R.sh | 24 -
testing/install_external_dependencies.sh | 46 +-
zeppelin-interpreter-integration/pom.xml | 3 +-
zeppelin-server/pom.xml | 3 +-
zeppelin-zengine/pom.xml | 3 +-
.../zeppelin/helium/HeliumBundleFactory.java | 2 +-
.../helium/HeliumApplicationFactoryTest.java | 8 +
.../zeppelin/helium/HeliumBundleFactoryTest.java | 2 +-
.../interpreter/remote/RemoteInterpreterTest.java | 5 +-
13 files changed, 415 insertions(+), 186 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 3c02459..faf1b0c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,179 +17,429 @@ language: java
before_cache:
- sudo chown -R travis:travis $HOME/.m2
+ # Ensure that jobs do not influence each other with installed Zeppelin Jars
+ - rm -rf $HOME/.m2/repository/org/apache/zeppelin/
cache:
apt: true
directories:
- - .spark-dist
- ${HOME}/.m2
- - ${HOME}/R
- zeppelin-web/node
- zeppelin-web/node_modules
- - zeppelin-web/bower_components
-
-services:
- - mysql
env:
global:
# Interpreters does not required by zeppelin-server integration tests
- INTERPRETERS='!beam,!hbase,!pig,!jdbc,!file,!flink,!ignite,!kylin,!lens,!cassandra,!elasticsearch,!bigquery,!alluxio,!scio,!livy,!groovy,!sap,!java,!geode,!neo4j,!hazelcastjet,!submarine,!sparql,!mongodb'
+ - CI="true"
jobs:
include:
- # Test License compliance using RAT tool
- - jdk: "openjdk8"
+ - name: "Test License compliance using RAT tool"
+ jdk: "openjdk8"
dist: xenial
- env: SCALA_VER="2.11" PROFILE="-Prat" BUILD_FLAG="clean" TEST_FLAG="org.apache.rat:apache-rat-plugin:check" TEST_PROJECTS=""
+ env: CACHE_NAME=rat
+ before_install:
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit'" >> ~/.mavenrc
+ install:
+ - mvn clean -Prat -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn org.apache.rat:apache-rat-plugin:check -Prat -B
- # Default build command, no tests
- - jdk: "openjdk8"
+
+ - name: "Default build command, no tests"
+ jdk: "openjdk8"
dist: xenial
- env: BUILD_FLAG="clean package -T C2 -DskipTests -Pweb-angular" TEST_FLAG="test -DskipTests -Pweb-angular"
+ before_install:
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ install:
+ - mvn clean package -T C2 -DskipTests -Pweb-angular -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipTests -Pweb-angular -B
- # Run e2e tests (in zeppelin-web)
- # chrome dropped the support for precise (ubuntu 12.04), so need to use trusty
- # also, can't use JDK 7 in trusty: https://github.com/travis-ci/travis-ci/issues/7884
- - os: linux
+
+ - name: "Run e2e tests in zeppelin-web"
+ os: linux
dist: xenial
jdk: "openjdk8"
- env: CI="true" WEB_E2E="true" PYTHON="2" SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Phadoop2 -Pscala-2.11" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl ${INTERPRETERS}" TEST_MODULES="-pl zeppelin-web" TEST_PROJECTS="-Pweb-e2e"
+ services:
+ - xvfb
addons:
apt:
packages:
- google-chrome-stable
+ before_install:
+ - export PYTHON=2
+ - export SPARK_VER="2.1.0"
+ - export HADOOP_VER="2.6"
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl ${INTERPRETERS} -Phadoop2 -Pscala-2.11 -B
+ before_script:
+ - travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -DskipRat -pl zeppelin-web -Phadoop2 -Pscala-2.11 -B -Pweb-e2e
+
- # Run tests (in zeppelin-web-angular)
- - os: linux
+ - name: "Run tests in zeppelin-web-angular"
+ os: linux
dist: xenial
jdk: "openjdk8"
- env: CI="true" BUILD_FLAG="clean -DskipTests -DskipRat" TEST_FLAG="package -DskipRat" MODULES="-pl ${INTERPRETERS}" TEST_MODULES="-pl zeppelin-web-angular -Pweb-angular"
+ services:
+ - xvfb
addons:
apt:
packages:
- google-chrome-stable
+ before_install:
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ install:
+ - mvn clean -DskipTests -DskipRat -pl ${INTERPRETERS} -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn package -DskipRat -pl zeppelin-web-angular -Pweb-angular -B
+
- # Test core modules (zeppelin-interpreter,zeppelin-zengine,zeppelin-server) on hadoop2
- # Several tests were excluded from this configuration due to the following issues:
- # HeliumApplicationFactoryTest - https://issues.apache.org/jira/browse/ZEPPELIN-2470
- # After issues are fixed these tests need to be included back by removing them from the "-Dtests.to.exclude" property
- - jdk: "openjdk8"
+ # After issues are fixed tests needs to be included back by removing them from the "-Dtests.to.exclude" property
+ - name: "Test core modules (zeppelin-interpreter,zeppelin-zengine,zeppelin-server) on hadoop2"
+ jdk: "openjdk8"
dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" R="true" PROFILE="-Phelium-dev -Pexamples -Phadoop2" BUILD_FLAG="install -Pbuild-distr -DskipRat -DskipTests" TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" MODULES="-pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am" TEST_PROJECTS="-Dtests.to.exclude=**/org/apache/zeppelin/spark/*,**/HeliumApplicationFactoryTest.java -DfailIfNoTests=false"
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -Pbuild-distr -DskipRat -DskipTests -pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -Phadoop2 -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -Pusing-packaged-distr -DskipRat -pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -Phadoop2 -B -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
+
- # Test core modules (zeppelin-interpreter,zeppelin-zengine,zeppelin-server) on hadoop3
- - jdk: "openjdk8"
+ - name: "Test core modules (zeppelin-interpreter,zeppelin-zengine,zeppelin-server) on hadoop3"
+ jdk: "openjdk8"
dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" R="true" PROFILE="-Phelium-dev -Pexamples -Phadoop3" BUILD_FLAG="install -Pbuild-distr -DskipRat -DskipTests" TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" MODULES="-pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am" TEST_PROJECTS="-Dtests.to.exclude=**/org/apache/zeppelin/spark/*,**/HeliumApplicationFactoryTest.java -DfailIfNoTests=false"
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -Pbuild-distr -DskipRat -DskipTests -pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -Phadoop3 -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -Pusing-packaged-distr -DskipRat -pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -Phadoop3 -B -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
- # Test selenium with spark module for spark 2.3
- - jdk: "openjdk8"
+
+ - name: "Test selenium with spark module for spark 2.3"
+ jdk: "openjdk8"
dist: xenial
addons:
firefox: "31.0"
- env: BUILD_PLUGINS="true" CI="true" PYTHON="2" R="true" SCALA_VER="2.11" SPARK_VER="2.3.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.3 -Phadoop2 -Phelium-dev -Pexamples -Pintegration -Pspark-scala-2.11" BUILD_FLAG="clean install -DskipTests -DskipRat -pl ${INTERPRETERS}" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl zeppelin-integration -DfailIfNoTests=false"
+ before_install:
+ - export PYTHON=2
+ - export R=true
+ - export SPARK_VER="2.3.2"
+ - export HADOOP_VER="2.6"
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1600x1024x16"
+ - source ~/.environ
+ install:
+ - mvn clean install -DskipTests -DskipRat -pl ${INTERPRETERS} -Pspark-2.3 -Phadoop2 -Phelium-dev -Pexamples -Pintegration -Pspark-scala-2.11 -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -DskipRat -Pspark-2.3 -Phadoop2 -Phelium-dev -Pexamples -Pintegration -Pspark-scala-2.11 -B -pl zeppelin-integration -DfailIfNoTests=false
+
- # Test interpreter modules
- - jdk: "openjdk8"
+ - name: "Test interpreter modules"
+ jdk: "openjdk8"
dist: xenial
- env: PYTHON="3" R="true" SCALA_VER="2.10" TENSORFLOW="1.13.1" PROFILE="-Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat" MODULES="-pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} | sed 's/!//g')" TEST_PROJECTS=""
+ before_install:
+ - export TENSORFLOW=1.13.1
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -am -pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} | sed 's/!//g') -Pscala-2.10 -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} | sed 's/!//g') -Pscala-2.10 -B
+
- # Test flink 1.10 & flink integration test
- - jdk: "openjdk8"
+ - name: "Test flink 1.10 & flink integration test"
+ jdk: "openjdk8"
dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" FLINK="1.10.1" PROFILE="-Pflink-1.10 -Pintegration" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat" MODULES="-pl flink/interpreter,zeppelin-interpreter-integration" TEST_PROJECTS="-Dtest=org.apache.zeppelin.flink.*,FlinkIntegrationTest110,ZeppelinFlinkClusterTest110"
+ before_install:
+ - export FLINK=1.10.1
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -am -pl flink/interpreter,zeppelin-interpreter-integration -Pflink-1.10 -Pintegration -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl flink/interpreter,zeppelin-interpreter-integration -Pflink-1.10 -Pintegration -B -Dtest=org.apache.zeppelin.flink.*,FlinkIntegrationTest110,ZeppelinFlinkClusterTest110
- # Test flink 1.11 & flink integration test
- - jdk: "openjdk8"
+
+ - name: "Test flink 1.11 & flink integration test"
+ jdk: "openjdk8"
dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" FLINK="1.11.1" PROFILE="-Pflink-1.11 -Pintegration" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat" MODULES="-pl flink/interpreter,zeppelin-interpreter-integration" TEST_PROJECTS="-Dtest=org.apache.zeppelin.flink.*,FlinkIntegrationTest111,ZeppelinFlinkClusterTest111"
+ before_install:
+ - export FLINK=1.11.1
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -am -pl flink/interpreter,zeppelin-interpreter-integration -Pflink-1.11 -Pintegration -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl flink/interpreter,zeppelin-interpreter-integration -Pflink-1.11 -Pintegration -B -Dtest=org.apache.zeppelin.flink.*,FlinkIntegrationTest111,ZeppelinFlinkClusterTest111
+
# Run Spark integration test and unit test
+ - name: "Run spark integration of in one zeppelin instance: Spark 3.0"
+ jdk: "openjdk8"
+ dist: xenial
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B -Dtest=ZeppelinSparkClusterTest30,SparkIntegrationTest30 -DfailIfNoTests=false
+
+
+ - name: "Run spark integration of in one zeppelin instance (2.4, 2.3, 2.2)"
+ jdk: "openjdk8"
+ dist: xenial
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B -Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest23,SparkIntegrationTest23,ZeppelinSparkClusterTest22,SparkIntegrationTest22 -DfailIfNoTests=false
+
+
+ - name: "JdbcIntegrationTest, Unit test of Spark 2.4 (Scala-2.11)"
+ jdk: "openjdk8"
+ dist: xenial
+ services:
+ - mysql
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies,markdown -am -Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration -B
+ - mvn clean package -T 2C -pl zeppelin-plugins -amd -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies,markdown -am -Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration -B -Dtest=JdbcIntegrationTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false
+
+
+ - name: "Unit test of Spark 2.4 (Scala-2.12)"
+ jdk: "openjdk8"
+ dist: xenial
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl spark/spark-dependencies -am -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl spark/spark-dependencies -am -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false
+
+
+ - name: "Unit test of Spark 2.3 (Scala-2.11) and Unit test python, jupyter and r interpreter under python3"
+ jdk: "openjdk8"
+ dist: xenial
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl spark/spark-dependencies -am -Pspark-2.3 -Pspark-scala-2.11 -Phadoop2 -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl spark/spark-dependencies -am -Pspark-2.3 -Pspark-scala-2.11 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
+
+
+ - name: "Unit test of Spark 2.2 (Scala-2.10) and Unit test python, jupyter and r interpreter under python3"
+ jdk: "openjdk8"
+ dist: xenial
+ before_install:
+ - export R=true
+ - export PYTHON=3
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl spark/spark-dependencies -am -Pspark-2.2 -Pspark-scala-2.10 -Phadoop2 -B
+ before_script:
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn test -DskipRat -pl spark/spark-dependencies -am -Pspark-2.2 -Pspark-scala-2.10 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
+
+
+ - name: "Test python/pyspark with python 2, livy 0.5"
+ dist: xenial
+ jdk: "openjdk8"
+ before_install:
+ - export PYTHON="2"
+ - export SPARK_VER="1.6.3"
+ - export HADOOP_VER="2.6"
+ - export LIVY_VER="0.5.0-incubating"
+ - export R="true"
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl livy -am -Pspark-1.6 -Phadoop2 -Pscala-2.10 -B
+ before_script:
+ - travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER
+ - ./testing/downloadLivy.sh $LIVY_VER
+ - export LIVY_HOME=`pwd`/livy-$LIVY_VER-bin
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -DskipRat -pl livy -am -Pspark-1.6 -Phadoop2 -Pscala-2.10 -B
+
+
+ - name: "Test livy 0.5 with spark 2.2.0 under python3"
+ dist: xenial
+ jdk: "openjdk8"
+ before_install:
+ - export PYTHON="3"
+ - export SPARK_VER="2.2.0"
+ - export HADOOP_VER="2.6"
+ - export LIVY_VER="0.5.0-incubating"
+ - export R="true"
+ - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
+ - bash -x ./testing/install_external_dependencies.sh
+ - source ~/.environ
+ install:
+ - mvn install -DskipTests -DskipRat -pl livy -am -B
+ before_script:
+ - travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER
+ - ./testing/downloadLivy.sh $LIVY_VER
+ - export LIVY_HOME=`pwd`/livy-$LIVY_VER-bin
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER
+ - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh
+ - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
+ - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
+ - export SPARK_PRINT_LAUNCH_COMMAND=true
+ - tail conf/zeppelin-env.sh
+ script:
+ - mvn verify -DskipRat -pl livy -am -B
- # Run spark integration of in one zeppelin instance: Spark 3.0
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.12" PROFILE="-Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest30,SparkIntegrationTest30 -DfailIfNoTests=false"
-
- # Run spark integration of in one zeppelin instance (2.4, 2.3, 2.2)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest23,SparkIntegrationTest23,ZeppelinSparkClusterTest22,SparkIntegrationTest22 -DfailIfNoTests=false"
-
- # JdbcIntegrationTest, Unit test of Spark 2.4 (Scala-2.11)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=JdbcIntegrationTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false"
-
- # Unit test of Spark 2.4 (Scala-2.12)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.12" PROFILE="-Pspark-2.4 -Pspark-scala-2.12 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false"
-
- # Unit test of Spark 2.3 (Scala-2.11) and Unit test python, jupyter and r interpreter under python3
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pspark-scala-2.11 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false"
-
- # Unit test of Spark 2.2 (Scala-2.10) and Unit test python, jupyter and r interpreter under python3
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.2 -Pspark-scala-2.10 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false"
-
- # Test python/pyspark with python 2, livy 0.5
- - dist: xenial
- jdk: "openjdk8"
- env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" LIVY_VER="0.5.0-incubating" R="true" PROFILE="-Pspark-1.6 -Phadoop2 -Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl livy -am" TEST_PROJECTS=""
-
- # Test livy 0.5 with spark 2.2.0 under python3
- - dist: xenial
- jdk: "openjdk8"
- env: PYTHON="3" SPARK_VER="2.2.0" HADOOP_VER="2.6" LIVY_VER="0.5.0-incubating" R="true" PROFILE="" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl livy -am" TEST_PROJECTS=""
-
-before_install:
- # check files included in commit range, clear bower_components if a bower.json file has changed.
- # bower cache clearing can also be forced by putting "bower clear" or "clear bower" in a commit message
- - changedfiles=$(git diff --name-only $TRAVIS_COMMIT_RANGE 2>/dev/null) || changedfiles=""
- - echo $changedfiles
- - hasbowerchanged=$(echo $changedfiles | grep -c "bower.json" || true);
- - gitlog=$(git log $TRAVIS_COMMIT_RANGE 2>/dev/null) || gitlog=""
- - clearcache=$(echo $gitlog | grep -c -E "clear bower|bower clear" || true)
- - if [ "$hasbowerchanged" -gt 0 ] || [ "$clearcache" -gt 0 ]; then echo "Clearing bower_components cache"; rm -r zeppelin-web/bower_components; npm cache verify; else echo "Using cached bower_components."; fi
- - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> ~/.mavenrc
- - if [[ -n $R ]]; then ./testing/install_R.sh; fi
- - bash -x ./testing/install_external_dependencies.sh
- - ls -la .spark-dist ${HOME}/.m2/repository/.cache/maven-download-plugin || true
- - ls .node_modules && cp -r .node_modules zeppelin-web/node_modules || echo "node_modules are not cached"
- - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1600x1024x16"
- #- ./dev/change_scala_version.sh $SCALA_VER
- - source ~/.environ
-
-install:
- - echo "mvn $BUILD_FLAG $MODULES $PROFILE -B"
- - mvn $BUILD_FLAG $MODULES $PROFILE -B
- - if [ x"$BUILD_PLUGINS" == x"true" ]; then echo "mvn clean package -T 2C -pl zeppelin-plugins -amd -B"; mvn clean package -T 2C -pl zeppelin-plugins -amd -B; fi
-
-before_script:
- - if [[ -n $SPARK_VER ]]; then travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER; fi
- - if [[ -n $LIVY_VER ]]; then ./testing/downloadLivy.sh $LIVY_VER; fi
- - if [[ -n $LIVY_VER ]]; then export LIVY_HOME=`pwd`/livy-$LIVY_VER-bin; fi
- - if [[ -n $LIVY_VER ]]; then export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER; fi
- - if [[ -n $SPARK_VER ]]; then export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER; fi
- - if [[ -n $SPARK_VER ]]; then echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh; fi
- - echo "export ZEPPELIN_HELIUM_REGISTRY=helium" >> conf/zeppelin-env.sh
- - echo "export SPARK_PRINT_LAUNCH_COMMAND=true" >> conf/zeppelin-env.sh
- - export SPARK_PRINT_LAUNCH_COMMAND=true
- - tail conf/zeppelin-env.sh
- # https://docs.travis-ci.com/user/gui-and-headless-browsers/#Using-xvfb-to-Run-Tests-That-Require-a-GUI
- - if [[ -n $TEST_MODULES ]]; then export DISPLAY=:99.0; sh -e /etc/init.d/xvfb start; sleep 3; fi
- # display info log for debugging
- - if [[ -n $TEST_MODULES ]]; then echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=info'" > ~/.mavenrc; fi
-
-script:
- - if [[ -n $TEST_MODULES ]]; then export MODULES="${TEST_MODULES}"; fi
- - echo "mvn $TEST_FLAG $MODULES $PROFILE -B $TEST_PROJECTS"
- - mvn $TEST_FLAG $MODULES $PROFILE -B $TEST_PROJECTS
after_success:
- echo "Travis exited with ${TRAVIS_TEST_RESULT}"
diff --git a/flink/interpreter/pom.xml b/flink/interpreter/pom.xml
index 83e2c01..bbfef74 100644
--- a/flink/interpreter/pom.xml
+++ b/flink/interpreter/pom.xml
@@ -694,7 +694,6 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skip>false</skip>
- <forkMode>always</forkMode>
<forkCount>1</forkCount>
<reuseForks>false</reuseForks>
<!-- set sun.zip.disableMemoryMapping=true because of
diff --git a/pig/pom.xml b/pig/pom.xml
index 2b1b962..7a49d96 100644
--- a/pig/pom.xml
+++ b/pig/pom.xml
@@ -160,7 +160,8 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
- <forkMode>always</forkMode>
+ <forkCount>1</forkCount>
+ <reuseForks>false</reuseForks>
</configuration>
</plugin>
<plugin>
diff --git a/r/pom.xml b/r/pom.xml
index 6ebefae..46340e4 100644
--- a/r/pom.xml
+++ b/r/pom.xml
@@ -189,7 +189,8 @@
<junitxml>.</junitxml>
<filereports>testoutput.txt</filereports>
<parallel>false</parallel>
- <forkMode>once</forkMode>
+ <forkCount>1</forkCount>
+ <reuseForks>true</reuseForks>
<systemProperties>
<scala.usejavacp>true</scala.usejavacp>
</systemProperties>
diff --git a/testing/install_R.sh b/testing/install_R.sh
deleted file mode 100755
index 63f3d4e..0000000
--- a/testing/install_R.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Install instruction from here https://cran.r-project.org/bin/linux/ubuntu/README.html
-
-sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9
-echo "deb https://cloud.r-project.org/bin/linux/ubuntu xenial-cran35/" | sudo tee -a /etc/apt/sources.list
-sudo apt-get update
-sudo apt-get install -y r-base
diff --git a/testing/install_external_dependencies.sh b/testing/install_external_dependencies.sh
index c4c8ea1..22fb0df 100755
--- a/testing/install_external_dependencies.sh
+++ b/testing/install_external_dependencies.sh
@@ -20,20 +20,23 @@
set -ev
touch ~/.environ
-# Install Python dependencies for Python specific tests
+# Install conda for Python and R dependencies
if [[ -n "$PYTHON" ]] ; then
wget "https://repo.continuum.io/miniconda/Miniconda${PYTHON}-4.6.14-Linux-x86_64.sh" -O miniconda.sh
+else
+ wget "https://repo.continuum.io/miniconda/Miniconda3-4.6.14-Linux-x86_64.sh" -O miniconda.sh
+fi
+bash miniconda.sh -b -p "$HOME/miniconda"
+rm -fv miniconda.sh
+echo "export PATH='$HOME/miniconda/bin:$PATH'" >> ~/.environ
+source ~/.environ
+hash -r
+conda config --set always_yes yes --set changeps1 no
+conda update -q conda
+conda info -a
+conda config --add channels conda-forge
- bash miniconda.sh -b -p "$HOME/miniconda"
- echo "export PATH='$HOME/miniconda/bin:$PATH'" >> ~/.environ
- source ~/.environ
-
- hash -r
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
- conda info -a
- conda config --add channels conda-forge
-
+if [[ -n "$PYTHON" ]] ; then
if [[ "$PYTHON" == "2" ]] ; then
pip install -q numpy==1.14.5 pandas==0.21.1 matplotlib==2.1.1 scipy==1.2.1 grpcio==1.19.0 bkzep==0.6.1 hvplot==0.5.2 \
protobuf==3.7.0 pandasql==0.7.3 ipython==5.8.0 ipykernel==4.10.0 bokeh==1.3.4 panel==0.6.0 holoviews==1.12.3
@@ -56,21 +59,8 @@ fi
# Install R dependencies if R is true
if [[ "$R" == "true" ]] ; then
- echo "R_LIBS=~/R" > ~/.Renviron
- echo "export R_LIBS=~/R" >> ~/.environ
- source ~/.environ
-
- sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9
- sudo add-apt-repository 'deb [arch=amd64,i386] https://cran.rstudio.com/bin/linux/ubuntu xenial/'
- sudo apt-get update
- sudo apt-get install r-base
-
- mkdir -p ~/R
- R -e "install.packages('evaluate', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
- R -e "install.packages('base64enc', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
- R -e "install.packages('knitr', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
- R -e "install.packages('ggplot2', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
- R -e "install.packages('IRkernel', repos = 'https://cloud.r-project.org', lib='~/R');IRkernel::installspec()" > /dev/null 2>&1
- R -e "install.packages('shiny', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
- R -e "install.packages('googleVis', repos = 'https://cloud.r-project.org', lib='~/R')" > /dev/null 2>&1
+ conda install -y --quiet r-base r-evaluate r-base64enc r-knitr r-ggplot2 r-irkernel r-shiny r-googlevis
+ R -e "IRkernel::installspec()"
+ echo "R_LIBS=~/miniconda/lib/R/library" > ~/.Renviron
+ echo "export R_LIBS=~/miniconda/lib/R/library" >> ~/.environ
fi
diff --git a/zeppelin-interpreter-integration/pom.xml b/zeppelin-interpreter-integration/pom.xml
index 8c8bfa2..71ab23f 100644
--- a/zeppelin-interpreter-integration/pom.xml
+++ b/zeppelin-interpreter-integration/pom.xml
@@ -185,7 +185,8 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
- <forkMode>always</forkMode>
+ <forkCount>1</forkCount>
+ <reuseForks>false</reuseForks>
<argLine>-Xmx3072m</argLine>
</configuration>
</plugin>
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index c0d549f..9fd635a 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -382,7 +382,8 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration combine.children="append">
- <forkMode>always</forkMode>
+ <forkCount>1</forkCount>
+ <reuseForks>false</reuseForks>
<argLine>-Xmx3g -Xms1g -Dfile.encoding=UTF-8</argLine>
<excludes>
<exclude>${tests.to.exclude}</exclude>
diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml
index 9977d68..7cd4368 100644
--- a/zeppelin-zengine/pom.xml
+++ b/zeppelin-zengine/pom.xml
@@ -303,7 +303,8 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
- <forkMode>always</forkMode>
+ <reuseForks>false</reuseForks>
+ <forkCount>1</forkCount>
<systemProperties>
<java.io.tmpdir>${project.build.directory}</java.io.tmpdir>
</systemProperties>
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java
index fb419fb..3beba56 100644
--- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java
@@ -109,7 +109,7 @@ public class HeliumBundleFactory {
@Inject
public HeliumBundleFactory(ZeppelinConfiguration conf) {
this.heliumLocalRepoDirectory =
- new File(conf.getRelativeDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), HELIUM_LOCAL_REPO);
+ new File(conf.getRelativeDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), HELIUM_LOCAL_REPO).getAbsoluteFile();
this.heliumBundleDirectory = new File(heliumLocalRepoDirectory, HELIUM_BUNDLES_DIR);
this.heliumLocalModuleDirectory = new File(heliumLocalRepoDirectory, HELIUM_LOCAL_MODULE_DIR);
this.yarnCacheDir = new File(heliumLocalRepoDirectory, YARN_CACHE_DIR);
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java
index 3ff965d..de2a331 100644
--- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java
@@ -40,6 +40,7 @@ import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.user.Credentials;
import org.junit.After;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
@@ -49,6 +50,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
private HeliumApplicationFactory heliumAppFactory;
private AuthenticationInfo anonymous;
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -79,6 +81,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
anonymous = new AuthenticationInfo("anonymous");
}
+ @Override
@After
public void tearDown() throws Exception {
super.tearDown();
@@ -86,6 +89,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
@Test
+ @Ignore
public void testLoadRunUnloadApplication()
throws IOException, ApplicationException, InterruptedException {
// given
@@ -132,6 +136,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
}
@Test
+ @Ignore
public void testUnloadOnParagraphRemove() throws IOException {
// given
HeliumPackage pkg1 = new HeliumPackage(HeliumType.APPLICATION,
@@ -170,6 +175,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
@Test
+ @Ignore
public void testUnloadOnInterpreterUnbind() throws IOException {
// given
HeliumPackage pkg1 = new HeliumPackage(HeliumType.APPLICATION,
@@ -205,6 +211,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
}
@Test
+ @Ignore
public void testInterpreterUnbindOfNullReplParagraph() throws IOException {
// create note
Note note1 = notebook.createNote("note1", anonymous);
@@ -228,6 +235,7 @@ public class HeliumApplicationFactoryTest extends AbstractInterpreterTest {
@Test
+ @Ignore
public void testUnloadOnInterpreterRestart() throws IOException, InterpreterException {
// given
HeliumPackage pkg1 = new HeliumPackage(HeliumType.APPLICATION,
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java
index 3428c95..d47fead 100644
--- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java
@@ -48,7 +48,7 @@ public class HeliumBundleFactoryTest {
ZeppelinConfiguration conf = ZeppelinConfiguration.create();
nodeInstallationDir =
- new File(conf.getRelativeDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), HELIUM_LOCAL_REPO);
+ new File(conf.getRelativeDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), HELIUM_LOCAL_REPO).getAbsoluteFile();
hbf = new HeliumBundleFactory(conf);
hbf.installNodeAndNpm();
hbf.copyFrameworkModulesToInstallPath(true);
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
index da6a251..63e0bdc 100644
--- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
@@ -54,6 +54,7 @@ public class RemoteInterpreterTest extends AbstractInterpreterTest {
private InterpreterSetting interpreterSetting;
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -402,7 +403,7 @@ public class RemoteInterpreterTest extends AbstractInterpreterTest {
interpreter1.interpret("1", context1);
fail("Should not be able to launch interpreter process");
} catch (InterpreterException e) {
- assertTrue(ExceptionUtils.getStackTrace(e).contains("No such file or directory"));
+ assertTrue(ExceptionUtils.getStackTrace(e).contains("java.io.IOException"));
}
} finally {
System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER.getVarName());
@@ -422,7 +423,7 @@ public class RemoteInterpreterTest extends AbstractInterpreterTest {
interpreter1.interpret("1", context1);
fail("Should not be able to launch interpreter process");
} catch (InterpreterException e) {
- assertTrue(ExceptionUtils.getStackTrace(e).contains("invalid_command: command not found"));
+ assertTrue(ExceptionUtils.getStackTrace(e).contains("invalid_command:"));
}
} finally {
System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER.getVarName());