Posted to commits@zeppelin.apache.org by zj...@apache.org on 2019/02/15 09:03:44 UTC

[zeppelin] branch master updated: [ZEPPELIN-4001] zeppelin-interpreter-integration is ignored due to wrong folder structure

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 97c845a  [ZEPPELIN-4001] zeppelin-interpreter-integration is ignored due to wrong folder structure
97c845a is described below

commit 97c845a6f39e60d93a553d9b438bb11fb38e0497
Author: Jeff Zhang <zj...@apache.org>
AuthorDate: Tue Feb 12 11:42:46 2019 +0800

    [ZEPPELIN-4001] zeppelin-interpreter-integration is ignored due to wrong folder structure
    
    ### What is this PR for?
    
    zeppelin-interpreter-integration is ignored due to its wrong folder structure, which means all the integration tests of this module are skipped. This PR fixes the issue by renaming `zeppelin-interpreter-integration/src/main/test` to `zeppelin-interpreter-integration/src/test`. Besides that, I split `SparkIntegrationTest` & `ZeppelinSparkClusterTest` into one subclass per Spark version, so each travis build only runs the integration tests and unit tests of a single Spark version. This could save travis build time as [...]
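    
    Each version now gets a thin parameterized subclass that pins exactly one Spark version. Below is a minimal sketch of the pattern, mirroring `SparkIntegrationTest24` from the diff; the other per-version classes follow the same shape:
    
    ```java
    package org.apache.zeppelin.integration;
    
    import java.util.Arrays;
    import java.util.List;
    
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    
    @RunWith(value = Parameterized.class)
    public class SparkIntegrationTest24 extends SparkIntegrationTest {
    
      public SparkIntegrationTest24(String sparkVersion) {
        super(sparkVersion);
      }
    
      // Pin a single Spark version so one travis job can run only
      // -Dtest=SparkIntegrationTest24 instead of iterating over all versions.
      @Parameterized.Parameters
      public static List<Object[]> data() {
        return Arrays.asList(new Object[][]{
                {"2.4.0"}
        });
      }
    }
    ```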
    
    ### What type of PR is it?
    [Bug Fix | Refactoring]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://jira.apache.org/jira/browse/ZEPPELIN-4001
    
    ### How should this be tested?
    * CI pass
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there any breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zj...@apache.org>
    
    Closes #3306 from zjffdu/ZEPPELIN-4001 and squashes the following commits:
    
    2a90ce33f [Jeff Zhang] [ZEPPELIN-4001] zeppelin-interpreter-integration is ignored due to wrong folder structure
---
 .travis.yml                                        |  38 +++++---
 spark/interpreter/figure/unnamed-chunk-1-1.png     | Bin 407541 -> 0 bytes
 zeppelin-interpreter-integration/pom.xml           |  43 ++++-----
 .../apache/zeppelin/integration/DownloadUtils.java |   0
 .../zeppelin/integration/FlinkIntegrationTest.java |   1 -
 .../zeppelin/integration/JdbcIntegrationTest.java  |   2 +-
 .../zeppelin/integration/MiniHadoopCluster.java    |   0
 .../apache/zeppelin/integration/MiniZeppelin.java  |   0
 .../zeppelin/integration/SparkIntegrationTest.java |   4 +-
 .../integration/SparkIntegrationTest16.java}       |   6 +-
 .../integration/SparkIntegrationTest20.java}       |   8 +-
 .../integration/SparkIntegrationTest21.java}       |   8 +-
 .../integration/SparkIntegrationTest22.java}       |   8 +-
 .../integration/SparkIntegrationTest23.java}       |   8 +-
 .../integration/SparkIntegrationTest24.java}       |   8 +-
 .../integration/ZeppelinSparkClusterTest.java      | 104 ++++++++++++---------
 .../integration/ZeppelinSparkClusterTest16.java}   |   8 +-
 .../integration/ZeppelinSparkClusterTest20.java}   |  10 +-
 .../integration/ZeppelinSparkClusterTest21.java}   |  10 +-
 .../integration/ZeppelinSparkClusterTest22.java}   |  10 +-
 .../integration/ZeppelinSparkClusterTest23.java}   |  10 +-
 .../integration/ZeppelinSparkClusterTest24.java}   |   9 +-
 .../src/test/resources/log4j.properties            |  46 +++++++++
 zeppelin-server/pom.xml                            |  14 +++
 .../java/org/apache/zeppelin/utils/TestUtils.java  |   1 +
 .../interpreter/InterpreterSettingManager.java     |   2 -
 26 files changed, 207 insertions(+), 151 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index b05c750..2d0c020 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -37,9 +37,13 @@ addons:
     - r-source
     - sourceline: 'deb http://cran.rstudio.com/bin/linux/ubuntu trusty/'
       key_url: 'keyserver.ubuntu.com/pks/lookup?op=get&search=0x51716619E084DAB9'
+    - r-packages-trusty
+    - mysql-5.7-trusty
     packages:
     - r-base
     - r-base-dev
+    - mysql-server
+    - mysql-client
 
 env:
   global:
@@ -87,35 +91,43 @@ matrix:
       dist: trusty
       env: PYTHON="3" SPARKR="true" SCALA_VER="2.10" PROFILE="-Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-api,${INTERPRETERS} | sed 's/!//g')" TEST_PROJECTS=""
 
-    # Run ZeppelinSparkClusterTest & SparkIntegrationTest in one build would exceed the time limitation of travis, so running them separately
+    # Run Spark integration test and unit test separately for each spark version
 
-    # Integration test of spark interpreter with different spark versions under python2, only run ZeppelinSparkClusterTest. Also run spark unit test of spark 2.4.0 in this build. And run JdbcIntegrationTest here as well.
+    # ZeppelinSparkClusterTest24, SparkIntegrationTest24, JdbcIntegrationTest, Unit test of Spark 2.4
     - sudo: required
       jdk: "oraclejdk8"
       dist: trusty
-      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTestPt1. Also run spark unit test of spark 2.3 in this build.
+    # ZeppelinSparkClusterTest23, SparkIntegrationTest23, Unit test of Spark 2.3
     - sudo: required
       jdk: "oraclejdk8"
       dist: trusty
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTestPt1,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest23,SparkIntegrationTest23,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTestPt2. Also run spark unit test of spark 2.2 in this build.
+    # ZeppelinSparkClusterTest22, SparkIntegrationTest22, Unit test of Spark 2.2
     - sudo: required
       jdk: "oraclejdk8"
       dist: trusty
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.2 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTestPt2,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.2 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # Test spark module for 2.1 with scala 2.10
-    - jdk: "oraclejdk8"
+    # ZeppelinSparkClusterTest21, SparkIntegrationTest21, Unit test of Spark 2.1
+    - sudo: required
+      jdk: "oraclejdk8"
       dist: trusty
-      env: PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-2.1 -Phadoop2 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.1 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest21,SparkIntegrationTest21,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # Test spark module for 1.6 with scala 2.10
-    - jdk: "oraclejdk8"
+    # ZeppelinSparkClusterTest20, SparkIntegrationTest20, Unit test of Spark 2.0
+    - sudo: required
+      jdk: "oraclejdk8"
+      dist: trusty
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.0 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest20,SparkIntegrationTest20,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+
+    # ZeppelinSparkClusterTest16, SparkIntegrationTest16, Unit test of Spark 1.6
+    - sudo: required
+      jdk: "oraclejdk8"
       dist: trusty
-      env: PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest16,SparkIntegrationTest16,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # Test python/pyspark with python 2, livy 0.5
     - sudo: required
diff --git a/spark/interpreter/figure/unnamed-chunk-1-1.png b/spark/interpreter/figure/unnamed-chunk-1-1.png
deleted file mode 100644
index 6f03c95..0000000
Binary files a/spark/interpreter/figure/unnamed-chunk-1-1.png and /dev/null differ
diff --git a/zeppelin-interpreter-integration/pom.xml b/zeppelin-interpreter-integration/pom.xml
index 0a93aa4..09c9710 100644
--- a/zeppelin-interpreter-integration/pom.xml
+++ b/zeppelin-interpreter-integration/pom.xml
@@ -58,6 +58,7 @@
       <groupId>org.apache.zeppelin</groupId>
       <artifactId>zeppelin-server</artifactId>
       <version>${project.version}</version>
+      <classifier>tests</classifier>
       <exclusions>
         <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
@@ -74,26 +75,25 @@
       </exclusions>
     </dependency>
 
-    <!--<dependency>-->
-      <!--<groupId>org.apache.zeppelin</groupId>-->
-      <!--<artifactId>zeppelin-server</artifactId>-->
-      <!--<version>${project.version}</version>-->
-      <!--<classifier>tests</classifier>-->
-      <!--<exclusions>-->
-        <!--<exclusion>-->
-          <!--<groupId>com.fasterxml.jackson.core</groupId>-->
-          <!--<artifactId>jackson-databind</artifactId>-->
-        <!--</exclusion>-->
-        <!--<exclusion>-->
-          <!--<groupId>com.fasterxml.jackson.core</groupId>-->
-          <!--<artifactId>jackson-annotations</artifactId>-->
-        <!--</exclusion>-->
-        <!--<exclusion>-->
-          <!--<groupId>com.fasterxml.jackson.core</groupId>-->
-          <!--<artifactId>jackson-core</artifactId>-->
-        <!--</exclusion>-->
-      <!--</exclusions>-->
-    <!--</dependency>-->
+    <dependency>
+      <groupId>org.apache.zeppelin</groupId>
+      <artifactId>zeppelin-server</artifactId>
+      <version>${project.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-annotations</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
 
     <!--test libraries-->
     <dependency>
@@ -131,9 +131,6 @@
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <forkMode>always</forkMode>
-          <systemProperties>
-            <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
-          </systemProperties>
           <environmentVariables>
             <ZEPPELIN_ZENGINE_TEST>1</ZEPPELIN_ZENGINE_TEST>
           </environmentVariables>
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/DownloadUtils.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/DownloadUtils.java
similarity index 100%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/DownloadUtils.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/DownloadUtils.java
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/FlinkIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java
similarity index 98%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/FlinkIntegrationTest.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java
index 526d389..b7a7da2 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/FlinkIntegrationTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java
@@ -28,7 +28,6 @@ import org.apache.zeppelin.interpreter.InterpreterFactory;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterSetting;
 import org.apache.zeppelin.interpreter.InterpreterSettingManager;
-import org.apache.zeppelin.interpreter.SparkDownloadUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/JdbcIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java
similarity index 98%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/JdbcIntegrationTest.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java
index 29ae06a..73913a6 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/JdbcIntegrationTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java
@@ -66,7 +66,7 @@ public class JdbcIntegrationTest {
     interpreterSetting.setProperty("default.user", "root");
     Dependency dependency = new Dependency("mysql:mysql-connector-java:5.1.46");
     interpreterSetting.setDependencies(Lists.newArrayList(dependency));
-    interpreterSettingManager.restart("jdbc");
+    interpreterSettingManager.restart(interpreterSetting.getId());
     interpreterSetting.waitForReady(60 * 1000);
     Interpreter jdbcInterpreter = interpreterFactory.getInterpreter("user1", "note1", "jdbc", "test");
     assertNotNull("JdbcInterpreter is null", jdbcInterpreter);
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/MiniHadoopCluster.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniHadoopCluster.java
similarity index 100%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/MiniHadoopCluster.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniHadoopCluster.java
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/MiniZeppelin.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniZeppelin.java
similarity index 100%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/MiniZeppelin.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniZeppelin.java
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java
similarity index 97%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTest.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java
index 0347937..03a482d 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java
@@ -29,7 +29,6 @@ import org.apache.zeppelin.interpreter.InterpreterFactory;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterSetting;
 import org.apache.zeppelin.interpreter.InterpreterSettingManager;
-import org.apache.zeppelin.interpreter.SparkDownloadUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -131,6 +130,7 @@ public abstract class SparkIntegrationTest {
     sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
     sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false");
     sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false");
+    sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false");
 
     testInterpreterBasics();
 
@@ -153,6 +153,7 @@ public abstract class SparkIntegrationTest {
     sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false");
     sparkInterpreterSetting.setProperty("PYSPARK_PYTHON", getPythonExec());
     sparkInterpreterSetting.setProperty("spark.driver.memory", "512m");
+    sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false");
 
     testInterpreterBasics();
 
@@ -175,6 +176,7 @@ public abstract class SparkIntegrationTest {
     sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false");
     sparkInterpreterSetting.setProperty("PYSPARK_PYTHON", getPythonExec());
     sparkInterpreterSetting.setProperty("spark.driver.memory", "512m");
+    sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false");
 
     testInterpreterBasics();
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest16.java
similarity index 87%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest16.java
index 125566d..8f5aacb 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest16.java
@@ -24,17 +24,15 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class SparkIntegrationTest16 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public SparkIntegrationTest16(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
             {"1.6.3"}
     });
   }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt1.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest20.java
similarity index 85%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt1.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest20.java
index 3996f06..4f3ebd8 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt1.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest20.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt1 extends SparkIntegrationTest{
+public class SparkIntegrationTest20 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt1(String sparkVersion) {
+  public SparkIntegrationTest20(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.4.0"},
-            {"2.3.2"},
-            {"2.2.1"},
+            {"2.0.2"}
     });
   }
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest21.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest21.java
index 125566d..e60fa2b 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest21.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class SparkIntegrationTest21 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public SparkIntegrationTest21(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.1.2"}
     });
   }
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest22.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest22.java
index 125566d..35d245e 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest22.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class SparkIntegrationTest22 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public SparkIntegrationTest22(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.2.1"}
     });
   }
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest23.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest23.java
index 125566d..ca960d3 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest23.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class SparkIntegrationTest23 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public SparkIntegrationTest23(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.3.2"}
     });
   }
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest24.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest24.java
index 125566d..0441cac 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest24.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class SparkIntegrationTest24 extends SparkIntegrationTest{
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public SparkIntegrationTest24(String sparkVersion) {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.4.0"}
     });
   }
 
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
similarity index 90%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
index b565f99..daedfc1 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
@@ -17,6 +17,7 @@
 package org.apache.zeppelin.integration;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.zeppelin.conf.ZeppelinConfiguration;
 import org.apache.zeppelin.display.AngularObject;
 import org.apache.zeppelin.interpreter.InterpreterException;
@@ -41,7 +42,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
+import java.io.StringReader;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -55,8 +58,7 @@ import static org.junit.Assert.assertTrue;
 /**
  * Test against spark cluster.
  */
-@RunWith(value = Parameterized.class)
-public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
+public abstract class ZeppelinSparkClusterTest extends AbstractTestRestApi {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(ZeppelinSparkClusterTest.class);
 
@@ -81,18 +83,6 @@ public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
     }
   }
 
-  @Parameterized.Parameters
-  public static List<Object[]> data() {
-    return Arrays.asList(new Object[][]{
-            {"2.4.0"},
-            {"2.3.2"},
-            {"2.2.1"},
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
-    });
-  }
-
   public void setupSparkInterpreter(String sparkHome) throws InterpreterException {
     InterpreterSetting sparkIntpSetting = TestUtils.getInstance(Notebook.class).getInterpreterSettingManager()
         .getInterpreterSettingByName("spark");
@@ -213,43 +203,65 @@ public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
     TestUtils.getInstance(Notebook.class).removeNote(note.getId(), anonymous);
   }
 
-    @Test
-    public void sparkReadJSONTest() throws IOException {
-      Note note = ZeppelinServer.notebook.createNote(anonymous);
-      Paragraph p = note.addNewParagraph(anonymous);
-      p.setText("%spark val jsonStr = \"\"\"{ \"metadata\": { \"key\": 84896, \"value\": 54 }}\"\"\"\n" +
-              "spark.read.json(Seq(jsonStr).toDS)");
-      note.run(p.getId(), true);
-      assertEquals(Status.FINISHED, p.getStatus());
-      assertTrue(p.getResult().message().get(0).getData().contains(
-              "org.apache.spark.sql.DataFrame = [metadata: struct<key: bigint, value: bigint>]\n"));
+  @Test
+  public void sparkReadJSONTest() throws IOException {
+    Note note = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous);
+    Paragraph p = note.addNewParagraph(anonymous);
+    File tmpJsonFile = File.createTempFile("test", ".json");
+    FileWriter jsonFileWriter = new FileWriter(tmpJsonFile);
+    IOUtils.copy(new StringReader("{\"metadata\": { \"key\": 84896, \"value\": 54 }}\n"),
+            jsonFileWriter);
+    jsonFileWriter.close();
+    if (isSpark2()) {
+      p.setText("%spark spark.read.json(\"file://" + tmpJsonFile.getAbsolutePath() + "\")");
+    } else {
+      p.setText("%spark sqlContext.read.json(\"file://" + tmpJsonFile.getAbsolutePath() + "\")");
     }
+    note.run(p.getId(), true);
+    assertEquals(Status.FINISHED, p.getStatus());
+    if (isSpark2()) {
+      assertTrue(p.getReturn().message().get(0).getData().contains(
+              "org.apache.spark.sql.DataFrame = [metadata: struct<key: bigint, value: bigint>]"));
+    } else {
+      assertTrue(p.getReturn().message().get(0).getData().contains(
+              "org.apache.spark.sql.DataFrame = [metadata: struct<key:bigint,value:bigint>]"));
+    }
+    TestUtils.getInstance(Notebook.class).removeNote(note.getId(), anonymous);
+  }
 
-    @Test
-    public void sparkReadCSVTest() throws IOException {
-      Note note = ZeppelinServer.notebook.createNote(anonymous);
-      Paragraph p = note.addNewParagraph(anonymous);
-      p.setText("%spark val csvStr = \"\"\"84896,54\"\"\"\n" +
-              "spark.read.csv(Seq(csvStr).toDS)");
-      note.run(p.getId(), true);
-      assertEquals(Status.FINISHED, p.getStatus());
-      assertTrue(p.getResult().message().get(0).getData().contains(
-              "org.apache.spark.sql.DataFrame = [_c0: string, _c1: string]\n"));
+  @Test
+  public void sparkReadCSVTest() throws IOException {
+    if (!isSpark2()) {
+      // csv is not supported in spark 1.x natively
+      return;
     }
+    Note note = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous);
+    Paragraph p = note.addNewParagraph(anonymous);
+    File tmpCSVFile = File.createTempFile("test", ".csv");
+    FileWriter csvFileWriter = new FileWriter(tmpCSVFile);
+    IOUtils.copy(new StringReader("84896,54"), csvFileWriter);
+    csvFileWriter.close();
+    p.setText("%spark spark.read.csv(\"file://" + tmpCSVFile.getAbsolutePath() + "\")");
+    note.run(p.getId(), true);
+    assertEquals(Status.FINISHED, p.getStatus());
+    assertTrue(p.getReturn().message().get(0).getData().contains(
+            "org.apache.spark.sql.DataFrame = [_c0: string, _c1: string]\n"));
+    TestUtils.getInstance(Notebook.class).removeNote(note.getId(), anonymous);
+  }
 
-    @Test
-    public void sparkSQLTest() throws IOException {
-      Note note = ZeppelinServer.notebook.createNote(anonymous);
-      // test basic dataframe api
-      Paragraph p = note.addNewParagraph(anonymous);
-      p.setText("%spark val df=sqlContext.createDataFrame(Seq((\"hello\",20)))\n" +
-              "df.collect()");
-      note.run(p.getId(), true);
-      assertEquals(Status.FINISHED, p.getStatus());
-      assertTrue(p.getResult().message().get(0).getData().contains(
-              "Array[org.apache.spark.sql.Row] = Array([hello,20])"));
+  @Test
+  public void sparkSQLTest() throws IOException {
+    Note note = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous);
+    // test basic dataframe api
+    Paragraph p = note.addNewParagraph(anonymous);
+    p.setText("%spark val df=sqlContext.createDataFrame(Seq((\"hello\",20)))\n" +
+            "df.collect()");
+    note.run(p.getId(), true);
+    assertEquals(Status.FINISHED, p.getStatus());
+    assertTrue(p.getReturn().message().get(0).getData().contains(
+            "Array[org.apache.spark.sql.Row] = Array([hello,20])"));
 
-      // test display DataFrame
+    // test display DataFrame
     p = note.addNewParagraph(anonymous);
     p.setText("%spark val df=sqlContext.createDataFrame(Seq((\"hello\",20)))\n" +
         "z.show(df)");
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest16.java
similarity index 87%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest16.java
index 125566d..954f024 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest16.java
@@ -24,19 +24,17 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest16 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest16(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
+
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
             {"1.6.3"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest20.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest20.java
index 125566d..22687d9 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest20.java
@@ -24,19 +24,17 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest20 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest20(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
+
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.0.2"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest21.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest21.java
index 125566d..4ac4676 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest21.java
@@ -24,19 +24,17 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest21 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest21(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
+
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.1.2"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest22.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest22.java
index 125566d..cc351dd 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest22.java
@@ -24,19 +24,17 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest22 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest22(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
+
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.2.1"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest23.java
similarity index 85%
copy from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
copy to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest23.java
index 125566d..22ef673 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest23.java
@@ -24,19 +24,17 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest23 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest23(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
+
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.3.2"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest24.java
similarity index 85%
rename from zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
rename to zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest24.java
index 125566d..b7fadd4 100644
--- a/zeppelin-interpreter-integration/src/main/test/org/apache/zeppelin/integration/SparkIntegrationTestPt2.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest24.java
@@ -24,19 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 @RunWith(value = Parameterized.class)
-public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+public class ZeppelinSparkClusterTest24 extends ZeppelinSparkClusterTest {
 
-  public SparkIntegrationTestPt2(String sparkVersion) {
+  public ZeppelinSparkClusterTest24(String sparkVersion) throws Exception {
     super(sparkVersion);
   }
 
   @Parameterized.Parameters
   public static List<Object[]> data() {
     return Arrays.asList(new Object[][]{
-            {"2.1.2"},
-            {"2.0.2"},
-            {"1.6.3"}
+            {"2.4.0"}
     });
   }
-
 }
diff --git a/zeppelin-interpreter-integration/src/test/resources/log4j.properties b/zeppelin-interpreter-integration/src/test/resources/log4j.properties
new file mode 100644
index 0000000..50300f1
--- /dev/null
+++ b/zeppelin-interpreter-integration/src/test/resources/log4j.properties
@@ -0,0 +1,46 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c:%L - %m%n
+#log4j.appender.stdout.layout.ConversionPattern=
+#%5p [%t] (%F:%L) - %m%n
+#%-4r [%t] %-5p %c %x - %m%n
+#
+
+# Root logger option
+log4j.rootLogger=INFO, stdout
+
+#mute some noisy guys
+log4j.logger.org.apache.hadoop.mapred=WARN
+log4j.logger.org.apache.hadoop.hive.ql=WARN
+log4j.logger.org.apache.hadoop.hive.metastore=WARN
+log4j.logger.org.apache.haadoop.hive.service.HiveServer=WARN
+
+log4j.logger.org.quartz=WARN
+log4j.logger.DataNucleus=WARN
+log4j.logger.DataNucleus.MetaData=ERROR
+log4j.logger.DataNucleus.Datastore=ERROR
+
+# Log all JDBC parameters
+log4j.logger.org.hibernate.type=ALL
+log4j.logger.org.apache.hadoop=WARN
+
+log4j.logger.org.apache.zeppelin.interpreter=DEBUG
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index 75f0af6..5163680 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -447,6 +447,20 @@
         </executions>
       </plugin>
 
+      <!-- publish test jar as well so that zeppelin-interpreter-integration can use it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <version>3.0.2</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java
index 9876ab6..930817b 100644
--- a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java
@@ -21,6 +21,7 @@ import org.apache.zeppelin.server.ZeppelinServer;
 import org.glassfish.hk2.api.ServiceLocator;
 import org.glassfish.hk2.api.ServiceLocatorFactory;
 
+//TODO(zjffdu) refactor this class, it should not be called by non-test code. Or rename it.
 public class TestUtils {
   public static <T> T getInstance(Class<T> clazz) {
     checkCalledByTestMethod();
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java
index de2e391..44d99be 100644
--- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java
@@ -632,7 +632,6 @@ public class InterpreterSettingManager implements NoteEventListener {
    * changed
    */
   private void copyDependenciesFromLocalPath(final InterpreterSetting setting) {
-    setting.setStatus(InterpreterSetting.Status.DOWNLOADING_DEPENDENCIES);
     final Thread t = new Thread() {
       public void run() {
         try {
@@ -649,7 +648,6 @@ public class InterpreterSettingManager implements NoteEventListener {
               }
             }
           }
-          setting.setStatus(InterpreterSetting.Status.READY);
         } catch (Exception e) {
           LOGGER.error(String.format("Error while copying deps for interpreter group : %s," +
                   " go to interpreter setting page click on edit and save it again to make " +