Posted to commits@zeppelin.apache.org by zj...@apache.org on 2020/04/04 15:05:19 UTC

[zeppelin] branch branch-0.9 updated: [ZEPPELIN-4719] Use new travis semantic

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.9 by this push:
     new 3017748  [ZEPPELIN-4719] Use new travis semantic
3017748 is described below

commit 301774869d3ea1bc6bd23ba339db7964c50de041
Author: Philipp Dallig <ph...@gmail.com>
AuthorDate: Thu Apr 2 13:13:17 2020 +0200

    [ZEPPELIN-4719] Use new travis semantic
    
    ### What is this PR for?
    Updates the .travis.yml file to the new Travis CI configuration semantics, as sketched below.
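    In essence, the change drops the deprecated `sudo` keys and renames the `matrix` section to `jobs`; a minimal sketch of the before/after (the full diff follows below):

    ```yaml
    # Old semantics (deprecated): top-level `sudo` and a `matrix` section
    # sudo: false
    # matrix:
    #   include:
    #     - sudo: required
    #       jdk: "openjdk8"

    # New semantics: no `sudo` keys, build jobs declared under `jobs`
    jobs:
      include:
        - jdk: "openjdk8"
          dist: xenial
    ```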
    
    ### What type of PR is it?
    * Improvement
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-4719
    
    ### How should this be tested?
    * Check the _Build config validation_ section of the Travis job (see the sketch after this list)
    * **Travis-Link**: https://travis-ci.org/github/Reamer/zeppelin/builds/670088229
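    One way to request the validated build config explicitly is the `version` key; this is shown only as an assumption and is not part of this commit, which relies on validation being enabled for the repository:

    ```yaml
    # Assumption: opting into Travis CI build config validation explicitly.
    # This commit does not add this key; the validation report also appears
    # when validation is enabled for the repository, as in the linked build.
    version: ~> 1.0

    language: java
    ```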
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Philipp Dallig <ph...@gmail.com>
    
    Closes #3714 from Reamer/update_travis_config and squashes the following commits:
    
    462c9a825 [Philipp Dallig] Use new travis semantic
    
    (cherry picked from commit 0bcc1a2dfba37efe7176233e7b21324a8e497de3)
    Signed-off-by: Jeff Zhang <zj...@apache.org>
---
 .travis.yml | 46 ++++++++++++++--------------------------------
 1 file changed, 14 insertions(+), 32 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 076638c..5082aac 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -15,9 +15,6 @@
 
 language: java
 
-sudo: false
-
-
 before_cache:
     - sudo chown -R travis:travis $HOME/.m2
 
@@ -53,7 +50,7 @@ env:
     # Interpreters does not required by zeppelin-server integration tests
     - INTERPRETERS='!beam,!hbase,!pig,!jdbc,!file,!flink,!ignite,!kylin,!lens,!cassandra,!elasticsearch,!bigquery,!alluxio,!scio,!livy,!groovy,!sap,!java,!geode,!neo4j,!hazelcastjet,!submarine,!sparql,!mongodb'
 
-matrix:
+jobs:
   include:
     # Test License compliance using RAT tool
     - jdk: "openjdk8"
@@ -61,8 +58,7 @@ matrix:
       env: SCALA_VER="2.11" PROFILE="-Prat" BUILD_FLAG="clean" TEST_FLAG="org.apache.rat:apache-rat-plugin:check" TEST_PROJECTS=""
 
     # Default build command, no tests
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_FLAG="clean package -T C2 -DskipTests -Pweb-angular" TEST_FLAG="test -DskipTests -Pweb-angular"
 
@@ -70,7 +66,6 @@ matrix:
     # chrome dropped the support for precise (ubuntu 12.04), so need to use trusty
     # also, can't use JDK 7 in trusty: https://github.com/travis-ci/travis-ci/issues/7884
     - os: linux
-      sudo: false
       dist: xenial
       jdk: "openjdk8"
       env: CI="true" WEB_E2E="true" PYTHON="2" SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Phadoop2 -Pscala-2.11" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl ${INTERPRETERS}" TEST_MODULES="-pl zeppelin-web" TEST_PROJECTS="-Pweb-e2e"
@@ -81,7 +76,6 @@ matrix:
 
     # Run tests (in zeppelin-web-angular)
     - os: linux
-      sudo: false
       dist: xenial
       jdk: "openjdk8"
       env: CI="true" BUILD_FLAG="clean -DskipTests -DskipRat" TEST_FLAG="package -DskipRat" MODULES="-pl ${INTERPRETERS}" TEST_MODULES="-pl zeppelin-web-angular -Pweb-angular"
@@ -94,8 +88,7 @@ matrix:
     # Several tests were excluded from this configuration due to the following issues:
     # HeliumApplicationFactoryTest - https://issues.apache.org/jira/browse/ZEPPELIN-2470
     # After issues are fixed these tests need to be included back by removing them from the "-Dtests.to.exclude" property
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="true" PYTHON="3" R="true" PROFILE="-Phelium-dev -Pexamples" BUILD_FLAG="install -Pbuild-distr -DskipRat -DskipTests" TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" MODULES="-pl zeppelin-server,zeppelin-web,spark/spark-dependencies,markdown,angular,shell -am" TEST_PROJECTS="-Dtests.to.exclude=**/org/apache/zeppelin/spark/*,**/HeliumApplicationFactoryTest.java -DfailIfNoTests=false"
 
@@ -114,68 +107,57 @@ matrix:
     # Run Spark integration test and unit test
 
     # Run spark integration of in one zeppelin instance (2.4, 2.3, 2.2)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest23,SparkIntegrationTest23,ZeppelinSparkClusterTest22,SparkIntegrationTest22 -DfailIfNoTests=false"
 
     # Run spark integration of in one zeppelin instance (2.1, 2.0, 1.6)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest21,SparkIntegrationTest21,ZeppelinSparkClusterTest20,SparkIntegrationTest20,ZeppelinSparkClusterTest16,SparkIntegrationTest16 -DfailIfNoTests=false"
 
     # JdbcIntegrationTest, Unit test of Spark 2.4 (Scala-2.11)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies,markdown -am" TEST_PROJECTS="-Dtest=JdbcIntegrationTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false"
 
     # Unit test of Spark 2.4 (Scala-2.12)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.12" PROFILE="-Pspark-2.4 -Pspark-scala-2.12 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false"
 
     # Unit test of Spark 2.3 (Scala-2.11) and Unit test python, jupyter and r interpreter under python2
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pspark-scala-2.11 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false"
 
     # Unit test of Spark 2.2 (Scala-2.10) and Unit test python, jupyter and r interpreter under python3
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.2 -Pspark-scala-2.10 -Phadoop2" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false"
 
     # Unit test of Spark 2.1 (Scala-2.10)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.1 -Phadoop2 -Pspark-scala-2.10 -Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # Unit test of Spark 2.0  (Scala-2.10), Use python 3.5 because spark 2.0 doesn't support python 3.6 +
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-2.0 -Phadoop2 -Pspark-scala-2.10" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # Unit test of Spark 1.6  (Scala-2.10)
-    - sudo: required
-      jdk: "openjdk8"
+    - jdk: "openjdk8"
       dist: xenial
       env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2 -Pspark-scala-2.10" R="true" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # Test python/pyspark with python 2, livy 0.5
-    - sudo: required
-      dist: xenial
+    - dist: xenial
       jdk: "openjdk8"
       env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" LIVY_VER="0.5.0-incubating" PROFILE="-Pspark-1.6 -Phadoop2 -Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl livy -am" TEST_PROJECTS=""
 
     # Test livy 0.5 with spark 2.2.0 under python3
-    - sudo: required
-      dist: xenial
+    - dist: xenial
       jdk: "openjdk8"
       env: PYTHON="3" SPARK_VER="2.2.0" HADOOP_VER="2.6" LIVY_VER="0.5.0-incubating" PROFILE="" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl livy -am" TEST_PROJECTS=""