Posted to commits@zeppelin.apache.org by fe...@apache.org on 2016/10/26 19:33:32 UTC

zeppelin git commit: ZEPPELIN-1477. Add Integration Test for LivyInterpreter

Repository: zeppelin
Updated Branches:
  refs/heads/master ba2b90c2a -> 0af86e393


ZEPPELIN-1477. Add Integration Test for LivyInterpreter

### What is this PR for?
This PR fixes several issues with the LivyInterpreter tests.
* The Livy interpreter's test code is not in the right place (`livy/src/main/test` instead of `livy/src/test/java`), so it never runs.
* LivyHelperTest fails.
* There is no integration test for LivyInterpreter, so follow-up changes are hard to verify.

This PR fixes the above issues. Regarding the integration test, some of Livy's artifacts are not available in the Maven repositories, so I had to copy them into `livy/local-maven-repo` as a local repository. The LivyInterpreter integration test also requires Spark and Livy to be installed; for now you have to download them manually. Please use Spark 1.5.x and Livy 0.2, which are the versions currently supported. Download Livy 0.2.0 from here: [https://github.com/cloudera/livy/releases](https://github.com/cloudera/livy/releases)
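
If you need to add a missing Livy artifact to that local repository yourself, something like the following `maven-install-plugin` invocation can be used (a sketch only: the jar path is a placeholder, and the coordinates mirror those declared in `livy/pom.xml`):
```
# Illustrative only: install a locally downloaded Livy jar into livy/local-maven-repo
mvn install:install-file \
  -Dfile=/path/to/livy-core-0.2.0.jar \
  -DgroupId=com.cloudera.livy \
  -DartifactId=livy-core \
  -Dversion=0.2.0 \
  -Dpackaging=jar \
  -DcreateChecksum=true \
  -DlocalRepositoryPath=livy/local-maven-repo
```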

Then use the following commands to execute the LivyIntegrationTest:
```
export SPARK_HOME=<path_to_spark>
export LIVY_HOME=<path_to_livy>
mvn clean package -pl 'livy' -Dtest=LivyIntegrationTest
```
If you hit any issues, you can check logs under `livy/target/tmp`.
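
For example (the log file names under `livy/target/tmp` depend on the Livy mini cluster setup, so the path below is only a placeholder):
```
ls livy/target/tmp
tail -n 100 livy/target/tmp/<log-file>
```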

### What type of PR is it?
[Bug Fix | Improvement]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-1477

### Screenshots (if appropriate)
![image](https://cloud.githubusercontent.com/assets/164491/18861677/b1389622-84b9-11e6-8b0a-424457ded975.png)

### Questions:
* Do the license files need an update? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zj...@apache.org>

Closes #1462 from zjffdu/ZEPPELIN-1477 and squashes the following commits:

75914b9 [Jeff Zhang] remove livy local jars
fee61f9 [Jeff Zhang] add more test
1b9fbbc [Jeff Zhang] add missing dependencies
e8ceff5 [Jeff Zhang] add missing livy jars
8632466 [Jeff Zhang] fix rat check
f560a92 [Jeff Zhang] ZEPPELIN-1477. Add Integration Test for LivyInterpreter


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/0af86e39
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/0af86e39
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/0af86e39

Branch: refs/heads/master
Commit: 0af86e3934255272c96bbf44a0ed7383dfe0145b
Parents: ba2b90c
Author: Jeff Zhang <zj...@apache.org>
Authored: Thu Oct 20 14:08:16 2016 +0800
Committer: Felix Cheung <fe...@apache.org>
Committed: Wed Oct 26 12:33:26 2016 -0700

----------------------------------------------------------------------
 livy/pom.xml                                    | 106 ++++++++
 .../org/apache/zeppelin/livy/LivyHelper.java    |   1 +
 .../apache/zeppelin/livy/LivyHelperTest.java    | 114 ---------
 .../zeppelin/livy/LivyInterpreterTest.java      |  86 -------
 .../apache/zeppelin/livy/LivyHelperTest.java    | 115 +++++++++
 .../zeppelin/livy/LivyIntegrationTest.java      | 242 +++++++++++++++++++
 .../zeppelin/livy/LivyInterpreterTest.java      |  86 +++++++
 livy/src/test/resources/log4j.properties        |  24 ++
 8 files changed, 574 insertions(+), 200 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/pom.xml
----------------------------------------------------------------------
diff --git a/livy/pom.xml b/livy/pom.xml
index 8accaa6..df2bf77 100644
--- a/livy/pom.xml
+++ b/livy/pom.xml
@@ -40,6 +40,7 @@
         <achilles.version>3.2.4-Zeppelin</achilles.version>
         <assertj.version>1.7.0</assertj.version>
         <mockito.version>1.9.5</mockito.version>
+        <livy.version>0.2.0</livy.version>
     </properties>
 
     <dependencies>
@@ -106,8 +107,58 @@
             <version>4.3.0.RELEASE</version>
         </dependency>
 
+        <dependency>
+            <groupId>com.cloudera.livy</groupId>
+            <artifactId>livy-integration-test</artifactId>
+            <version>${livy.version}</version>
+            <scope>compile</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.xerial.snappy</groupId>
+                    <artifactId>snappy-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.cloudera.livy</groupId>
+            <artifactId>livy-test-lib</artifactId>
+            <version>${livy.version}</version>
+            <scope>compile</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.xerial.snappy</groupId>
+                    <artifactId>snappy-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.cloudera.livy</groupId>
+            <artifactId>livy-core</artifactId>
+            <version>${livy.version}</version>
+            <scope>compile</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.xerial.snappy</groupId>
+                    <artifactId>snappy-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
 
+    <repositories>
+        <repository>
+            <id>ossrh</id>
+            <name>ossrh repository</name>
+            <url>https://oss.sonatype.org/content/repositories/releases/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>false</enabled>
+            </snapshots>
+        </repository>
+    </repositories>
+
     <build>
         <plugins>
             <plugin>
@@ -165,6 +216,61 @@
                     </execution>
                 </executions>
             </plugin>
+
+            <plugin>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>2.16</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <systemPropertyVariables>
+                        <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
+                    </systemPropertyVariables>
+                    <argLine>-Xmx2048m</argLine>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <version>1.8</version>
+                <executions>
+                    <!-- Cleans up files that tests append to (because we have two test plugins). -->
+                    <execution>
+                        <id>pre-test-clean</id>
+                        <phase>generate-test-resources</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <target>
+                                <delete file="${project.build.directory}/unit-tests.log" quiet="true" />
+                                <delete file="${project.build.directory}/jacoco.exec" quiet="true" />
+                                <delete dir="${project.build.directory}/tmp" quiet="true" />
+                            </target>
+                        </configuration>
+                    </execution>
+                    <!-- Create the temp directory to be  used by tests. -->
+                    <execution>
+                        <id>create-tmp-dir</id>
+                        <phase>generate-test-resources</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <target>
+                                <mkdir dir="${project.build.directory}/tmp" />
+                            </target>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/main/java/org/apache/zeppelin/livy/LivyHelper.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivyHelper.java b/livy/src/main/java/org/apache/zeppelin/livy/LivyHelper.java
index ab0b499..209a842 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivyHelper.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivyHelper.java
@@ -321,6 +321,7 @@ public class LivyHelper {
             + userSessionMap.get(context.getAuthenticationInfo().getUser())
             + "/statements/" + id,
         "GET", null, context.getParagraphId());
+    LOGGER.debug("statement {} response: {}", id, json);
     try {
       Map jsonMap = gson.fromJson(json,
           new TypeToken<Map>() {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/main/test/org/apache/zeppelin/livy/LivyHelperTest.java
----------------------------------------------------------------------
diff --git a/livy/src/main/test/org/apache/zeppelin/livy/LivyHelperTest.java b/livy/src/main/test/org/apache/zeppelin/livy/LivyHelperTest.java
deleted file mode 100644
index 4928901..0000000
--- a/livy/src/main/test/org/apache/zeppelin/livy/LivyHelperTest.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.zeppelin.livy;
-
-import com.google.gson.GsonBuilder;
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.hamcrest.CoreMatchers;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ErrorCollector;
-import org.junit.runner.RunWith;
-import org.mockito.Answers;
-import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
-
-import java.util.HashMap;
-import java.util.Properties;
-
-import static org.mockito.Mockito.doReturn;
-
-/**
- * Created for org.apache.zeppelin.livy on 22/04/16.
- */
-
-@RunWith(MockitoJUnitRunner.class)
-public class LivyHelperTest {
-
-  @Rule
-  public ErrorCollector collector = new ErrorCollector();
-
-  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
-  private static LivyPySparkInterpreter interpreter;
-
-  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
-  private InterpreterContext interpreterContext;
-
-  @Mock(answer = Answers.CALLS_REAL_METHODS)
-  private LivyHelper livyHelper;
-
-  @Before
-  public void prepareContext() throws Exception {
-    interpreter.userSessionMap = new HashMap<>();
-    interpreter.userSessionMap.put(null, 1);
-
-    Properties properties = new Properties();
-    properties.setProperty("zeppelin.livy.url", "http://localhost:8998");
-    livyHelper.property = properties;
-    livyHelper.paragraphHttpMap = new HashMap<>();
-    livyHelper.gson = new GsonBuilder().setPrettyPrinting().create();
-
-
-    doReturn("{\"id\":1,\"state\":\"idle\",\"kind\":\"spark\",\"proxyUser\":\"null\",\"log\":[]}")
-        .when(livyHelper)
-        .executeHTTP(
-            livyHelper.property.getProperty("zeppelin.livy.url") + "/sessions",
-            "POST",
-            "{\"kind\": \"spark\", \"proxyUser\": \"null\"}",
-            null
-        );
-
-    doReturn("{\"id\":1,\"state\":\"available\",\"output\":{\"status\":\"ok\"," +
-        "\"execution_count\":1,\"data\":{\"text/plain\":\"1\"}}}")
-        .when(livyHelper)
-        .executeHTTP(
-            livyHelper.property.getProperty("zeppelin.livy.url") + "/sessions/1/statements",
-            "POST",
-            "{\"code\": \"print(1)\" }",
-            null
-        );
-
-  }
-
-
-  @Test
-  public void checkCreateSession() {
-    try {
-      Integer sessionId = livyHelper.createSession(interpreterContext, "spark");
-
-      collector.checkThat("check sessionId", 1, CoreMatchers.equalTo(sessionId));
-
-    } catch (Exception e) {
-      collector.addError(e);
-    }
-  }
-
-  @Test
-  public void checkInterpret() {
-    try {
-      InterpreterResult result = livyHelper.interpret("print(1)", interpreterContext, interpreter.userSessionMap);
-
-      collector.checkThat("check sessionId", InterpreterResult.Code.SUCCESS, CoreMatchers.equalTo(result.code()));
-
-    } catch (Exception e) {
-      collector.addError(e);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/main/test/org/apache/zeppelin/livy/LivyInterpreterTest.java
----------------------------------------------------------------------
diff --git a/livy/src/main/test/org/apache/zeppelin/livy/LivyInterpreterTest.java b/livy/src/main/test/org/apache/zeppelin/livy/LivyInterpreterTest.java
deleted file mode 100644
index b21afa4..0000000
--- a/livy/src/main/test/org/apache/zeppelin/livy/LivyInterpreterTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.zeppelin.livy;
-
-
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.hamcrest.CoreMatchers;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ErrorCollector;
-import org.junit.runner.RunWith;
-import org.mockito.Answers;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
-
-import java.util.HashMap;
-import java.util.Properties;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.doReturn;
-
-@RunWith(MockitoJUnitRunner.class)
-public class LivyInterpreterTest {
-
-  @Rule
-  public ErrorCollector collector = new ErrorCollector();
-
-  private static LivyPySparkInterpreter interpreter;
-
-  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
-  private InterpreterContext interpreterContext;
-
-  @AfterClass
-  public static void tearDown() {
-    interpreter.close();
-  }
-
-  @Before
-  public void prepareContext() throws Exception {
-    interpreter = new LivyPySparkInterpreter(new Properties());
-    interpreter.userSessionMap = new HashMap<>();
-    interpreter.userSessionMap.put(null, 0);
-    interpreter.livyHelper = Mockito.mock(LivyHelper.class);
-    interpreter.open();
-
-    doReturn(new InterpreterResult(InterpreterResult.Code.SUCCESS)).when(interpreter.livyHelper)
-        .interpret("print \"x is 1.\"", interpreterContext, interpreter.userSessionMap);
-  }
-
-  @Test
-  public void checkInitVariables() throws Exception {
-    collector.checkThat("Check that, if userSessionMap is made: ",
-        interpreter.userSessionMap, CoreMatchers.notNullValue());
-  }
-
-  @Test
-  public void checkBasicInterpreter() throws Exception {
-
-    String paragraphString = "print \"x is 1.\"";
-
-    final InterpreterResult actual = interpreter.interpret(paragraphString, interpreterContext);
-
-    collector.checkThat("Check that, result is computed: ",
-        actual.code(), CoreMatchers.equalTo(InterpreterResult.Code.SUCCESS));
-    assertThat(actual).isNotNull();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/test/java/org/apache/zeppelin/livy/LivyHelperTest.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyHelperTest.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyHelperTest.java
new file mode 100644
index 0000000..32d682f
--- /dev/null
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyHelperTest.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.zeppelin.livy;
+
+import com.google.gson.GsonBuilder;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.hamcrest.CoreMatchers;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ErrorCollector;
+import org.junit.runner.RunWith;
+import org.mockito.Answers;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Properties;
+
+import static org.mockito.Mockito.doReturn;
+
+/**
+ * Created for org.apache.zeppelin.livy on 22/04/16.
+ */
+
+@RunWith(MockitoJUnitRunner.class)
+public class LivyHelperTest {
+
+  @Rule
+  public ErrorCollector collector = new ErrorCollector();
+
+  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
+  private static LivyPySparkInterpreter interpreter;
+
+  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
+  private InterpreterContext interpreterContext;
+
+  @Mock(answer = Answers.CALLS_REAL_METHODS)
+  private LivyHelper livyHelper;
+
+  @Before
+  public void prepareContext() throws Exception {
+    interpreter.userSessionMap = new HashMap<>();
+    interpreter.userSessionMap.put(null, 1);
+
+    Properties properties = new Properties();
+    properties.setProperty("zeppelin.livy.url", "http://localhost:8998");
+    livyHelper.property = properties;
+    livyHelper.paragraphHttpMap = new HashMap<>();
+    livyHelper.gson = new GsonBuilder().setPrettyPrinting().create();
+    livyHelper.LOGGER = LoggerFactory.getLogger(LivyHelper.class);
+
+    doReturn("{\"id\":1,\"state\":\"idle\",\"kind\":\"spark\",\"proxyUser\":\"null\",\"log\":[]}")
+        .when(livyHelper)
+        .executeHTTP(
+            livyHelper.property.getProperty("zeppelin.livy.url") + "/sessions",
+            "POST",
+            "{\"kind\": \"spark\", \"conf\": {}, \"proxyUser\": null}",
+            null
+        );
+
+    doReturn("{\"id\":1,\"state\":\"available\",\"output\":{\"status\":\"ok\"," +
+        "\"execution_count\":1,\"data\":{\"text/plain\":\"1\"}}}")
+        .when(livyHelper)
+        .executeHTTP(
+            livyHelper.property.getProperty("zeppelin.livy.url") + "/sessions/1/statements",
+            "POST",
+            "{\"code\": \"print(1)\"}",
+            null
+        );
+
+  }
+
+
+  @Test
+  public void checkCreateSession() {
+    try {
+      Integer sessionId = livyHelper.createSession(interpreterContext, "spark");
+
+      collector.checkThat("check sessionId", 1, CoreMatchers.equalTo(sessionId));
+
+    } catch (Exception e) {
+      collector.addError(e);
+    }
+  }
+
+  @Test
+  public void checkInterpret() {
+    try {
+      InterpreterResult result = livyHelper.interpret("print(1)", interpreterContext, interpreter.userSessionMap);
+
+      collector.checkThat("check sessionId", InterpreterResult.Code.SUCCESS, CoreMatchers.equalTo(result.code()));
+
+    } catch (Exception e) {
+      collector.addError(e);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/test/java/org/apache/zeppelin/livy/LivyIntegrationTest.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyIntegrationTest.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyIntegrationTest.java
new file mode 100644
index 0000000..6df3700
--- /dev/null
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyIntegrationTest.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.livy;
+
+
+import com.cloudera.livy.test.framework.Cluster;
+import com.cloudera.livy.test.framework.Cluster$;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterOutput;
+import org.apache.zeppelin.interpreter.InterpreterOutputListener;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.user.AuthenticationInfo;
+import org.junit.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class LivyIntegrationTest {
+
+  private static Logger LOGGER = LoggerFactory.getLogger(LivyIntegrationTest.class);
+  private static Cluster cluster;
+  private static Properties properties;
+
+  @BeforeClass
+  public static void setUp() {
+    if (!checkPreCondition()) {
+      return;
+    }
+    cluster = Cluster$.MODULE$.get();
+    LOGGER.info("Starting livy at {}", cluster.livyEndpoint());
+    properties = new Properties();
+    properties.setProperty("zeppelin.livy.url", cluster.livyEndpoint());
+    properties.setProperty("zeppelin.livy.create.session.retries", "120");
+    properties.setProperty("zeppelin.livy.spark.sql.maxResult", "100");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    if (cluster != null) {
+      cluster.cleanUp();
+    }
+  }
+
+  public static boolean checkPreCondition() {
+    if (System.getenv("LIVY_HOME") == null) {
+      LOGGER.warn(("livy integration is skipped because LIVY_HOME is not set"));
+      return false;
+    }
+    if (System.getenv("SPARK_HOME") == null) {
+      LOGGER.warn(("livy integration is skipped because SPARK_HOME is not set"));
+      return false;
+    }
+    return true;
+  }
+
+  @Test
+  public void testSparkInterpreter() {
+    if (!checkPreCondition()) {
+      return;
+    }
+
+    LivySparkInterpreter sparkInterpreter = new LivySparkInterpreter(properties);
+    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
+    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
+    InterpreterOutput output = new InterpreterOutput(outputListener);
+    InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "title",
+        "text", authInfo, null, null, null, null, null, output);
+    sparkInterpreter.open();
+    InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+
+    // test RDD api
+    outputListener.reset();
+    result = sparkInterpreter.interpret("sc.parallelize(1 to 10).sum()", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertTrue(outputListener.getOutputAppended().contains("Double = 55.0"));
+
+    // test DataFrame api
+    outputListener.reset();
+    sparkInterpreter.interpret("val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n"
+        + "import sqlContext.implicits._", context);
+    result = sparkInterpreter.interpret("val df=sqlContext.createDataFrame(Seq((\"hello\",20)))\n"
+            + "df.collect()" , context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertTrue(outputListener.getOutputAppended()
+            .contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
+    sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
+
+    // test LivySparkSQLInterpreter which share the same SparkContext with LivySparkInterpreter
+    outputListener.reset();
+    LivySparkSQLInterpreter sqlInterpreter = new LivySparkSQLInterpreter(properties);
+    sqlInterpreter.open();
+    result = sqlInterpreter.interpret("select * from df", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TABLE, result.type());
+    // TODO (zjffdu), \t at the end of each line is not necessary, it is a bug of LivySparkSQLInterpreter
+    assertEquals("_1\t_2\t\nhello\t20\t\n", result.message());
+
+    // single line comment
+    outputListener.reset();
+    String singleLineComment = "// my comment";
+    result = sparkInterpreter.interpret(singleLineComment, context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertNull(result.message());
+
+    // multiple line comment
+    outputListener.reset();
+    String multipleLineComment = "/* multiple \n" + "line \n" + "comment */";
+    result = sparkInterpreter.interpret(multipleLineComment, context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertNull(result.message());
+
+    // multi-line string
+    outputListener.reset();
+    String multiLineString = "val str = \"\"\"multiple\n" +
+            "line\"\"\"\n" +
+            "println(str)";
+    result = sparkInterpreter.interpret(multiLineString, context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertNull(result.message());
+    assertTrue(outputListener.getOutputAppended().contains("multiple\nline"));
+
+    // case class
+    outputListener.reset();
+    String caseClassCode = "case class Person(id:Int, \n" +
+            "name:String)\n" +
+            "val p=Person(1, \"name_a\")";
+    result = sparkInterpreter.interpret(caseClassCode, context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertNull(result.message());
+    assertTrue(outputListener.getOutputAppended().contains("defined class Person"));
+
+    // object class
+    outputListener.reset();
+    String objectClassCode = "object Person {}";
+    result = sparkInterpreter.interpret(objectClassCode, context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertNull(result.message());
+    assertTrue(outputListener.getOutputAppended().contains("defined module Person"));
+  }
+
+  @Test
+  public void testPySparkInterpreter() {
+    if (!checkPreCondition()) {
+      return;
+    }
+
+    LivyPySparkInterpreter pysparkInterpreter = new LivyPySparkInterpreter(properties);
+    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
+    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
+    InterpreterOutput output = new InterpreterOutput(outputListener);
+    InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "title",
+            "text", authInfo, null, null, null, null, null, output);
+    pysparkInterpreter.open();
+    InterpreterResult result = pysparkInterpreter.interpret("sc.version", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+
+    // test RDD api
+    outputListener.reset();
+    result = pysparkInterpreter.interpret("sc.range(1, 10).sum()", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertEquals("45", result.message());
+
+    // test DataFrame api
+    outputListener.reset();
+    pysparkInterpreter.interpret("from pyspark.sql import SQLContext\n"
+        + "sqlContext = SQLContext(sc)", context);
+    result = pysparkInterpreter.interpret("df=sqlContext.createDataFrame([(\"hello\",20)])\n"
+            + "df.collect()" , context);
+    assertTrue(result.message().contains("[Row(_1=u'hello', _2=20)]"));
+    assertEquals(InterpreterResult.Type.TEXT, result.type());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+  }
+
+  @Test
+  public void testSparkRInterpreter() {
+    if (!checkPreCondition()) {
+      return;
+    }
+    // TODO (zjffdu),  Livy's SparkRIntepreter has some issue, do it after livy-0.3 release.
+  }
+
+  public static class MyInterpreterOutputListener implements InterpreterOutputListener {
+    private StringBuilder outputAppended = new StringBuilder();
+    private StringBuilder outputUpdated = new StringBuilder();
+
+    @Override
+    public void onAppend(InterpreterOutput out, byte[] line) {
+      LOGGER.info("onAppend:" + new String(line));
+      outputAppended.append(new String(line));
+    }
+
+    @Override
+    public void onUpdate(InterpreterOutput out, byte[] output) {
+      LOGGER.info("onUpdate:" + new String(output));
+      outputUpdated.append(new String(output));
+    }
+
+    public String getOutputAppended() {
+      return outputAppended.toString();
+    }
+
+    public String getOutputUpdated() {
+      return outputUpdated.toString();
+    }
+
+    public void reset() {
+      outputAppended = new StringBuilder();
+      outputUpdated = new StringBuilder();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterTest.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterTest.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterTest.java
new file mode 100644
index 0000000..b21afa4
--- /dev/null
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterTest.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.zeppelin.livy;
+
+
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.hamcrest.CoreMatchers;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ErrorCollector;
+import org.junit.runner.RunWith;
+import org.mockito.Answers;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.HashMap;
+import java.util.Properties;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.doReturn;
+
+@RunWith(MockitoJUnitRunner.class)
+public class LivyInterpreterTest {
+
+  @Rule
+  public ErrorCollector collector = new ErrorCollector();
+
+  private static LivyPySparkInterpreter interpreter;
+
+  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
+  private InterpreterContext interpreterContext;
+
+  @AfterClass
+  public static void tearDown() {
+    interpreter.close();
+  }
+
+  @Before
+  public void prepareContext() throws Exception {
+    interpreter = new LivyPySparkInterpreter(new Properties());
+    interpreter.userSessionMap = new HashMap<>();
+    interpreter.userSessionMap.put(null, 0);
+    interpreter.livyHelper = Mockito.mock(LivyHelper.class);
+    interpreter.open();
+
+    doReturn(new InterpreterResult(InterpreterResult.Code.SUCCESS)).when(interpreter.livyHelper)
+        .interpret("print \"x is 1.\"", interpreterContext, interpreter.userSessionMap);
+  }
+
+  @Test
+  public void checkInitVariables() throws Exception {
+    collector.checkThat("Check that, if userSessionMap is made: ",
+        interpreter.userSessionMap, CoreMatchers.notNullValue());
+  }
+
+  @Test
+  public void checkBasicInterpreter() throws Exception {
+
+    String paragraphString = "print \"x is 1.\"";
+
+    final InterpreterResult actual = interpreter.interpret(paragraphString, interpreterContext);
+
+    collector.checkThat("Check that, result is computed: ",
+        actual.code(), CoreMatchers.equalTo(InterpreterResult.Code.SUCCESS));
+    assertThat(actual).isNotNull();
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0af86e39/livy/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/livy/src/test/resources/log4j.properties b/livy/src/test/resources/log4j.properties
new file mode 100644
index 0000000..2b6d987
--- /dev/null
+++ b/livy/src/test/resources/log4j.properties
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger = INFO, stdout
+
+log4j.appender.stdout = org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
+
+log4j.logger.org.apache.zeppelin.livy=DEBUG