Posted to commits@zeppelin.apache.org by zj...@apache.org on 2019/05/17 07:54:59 UTC

[zeppelin] branch master updated: [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 1ca7039  [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
1ca7039 is described below

commit 1ca7039f02c81fdaa41d185c67810fcf197da3a9
Author: Jeff Zhang <zj...@apache.org>
AuthorDate: Wed Apr 24 17:05:42 2019 +0800

    [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
    
    ### What is this PR for?
    This PR fixes the issue of SPARK-22393 in Zeppelin. We fix it by using Spark's `SparkILoop` instead of the stock Scala `ILoop`, so that `createInterpreter()` builds Spark's patched interpreter (which resolves imports correctly) rather than the plain `IMain`.
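    
    A condensed sketch of the change (the full diff is below; `sparkILoop`, `replOut`, and `settings` come from the surrounding interpreter code, which is elided here):
    
    ```scala
    import org.apache.spark.repl.SparkILoop  // Spark's REPL loop, which carries the SPARK-22393 fix
    
    // before: sparkILoop = new ILoop(None, replOut)  // stock Scala REPL loop
    sparkILoop = new SparkILoop(None, replOut)        // Spark's REPL loop instead
    sparkILoop.settings = settings
    sparkILoop.createInterpreter()
    ```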
    
    ### What type of PR is it?
    [Bug Fix]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://jira.apache.org/jira/browse/ZEPPELIN-4132
    
    ### How should this be tested?
    * Unit test is added
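    
    The added test boils down to running these two paragraphs through the interpreter; the second one failed before this fix because the stock `ILoop` did not make the imported type visible to the later class definition:
    
    ```scala
    import java.util.ArrayList              // succeeds either way
    class MyArrayList extends ArrayList{}   // "not found: type ArrayList" before the fix; succeeds now
    ```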
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need an update? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zj...@apache.org>
    
    Closes #3353 from zjffdu/ZEPPELIN-4132 and squashes the following commits:
    
    c94b34af2 [Jeff Zhang] [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
---
 .travis.yml                                                        | 2 +-
 .../java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java    | 7 +++++++
 .../scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala | 5 +++--
 3 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 7a9fedf..2335911 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -109,7 +109,7 @@ matrix:
     - sudo: required
       jdk: "oraclejdk8"
       dist: trusty
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.2 -Pscala-2.10 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.2 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # ZeppelinSparkClusterTest21, SparkIntegrationTest21, Unit test of Spark 2.1
     - sudo: required
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
index 773deae..7faae2c 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
@@ -164,6 +164,13 @@ public class NewSparkInterpreterTest {
         "val circle1 = new Circle(5.0)", getInterpreterContext());
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
 
+    // class extend
+    result = interpreter.interpret("import java.util.ArrayList", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
+    result = interpreter.interpret("class MyArrayList extends ArrayList{}", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
     // spark rdd operation
     result = interpreter.interpret("sc\n.range(1, 10)\n.sum", getInterpreterContext());
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
diff --git a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
index 0956e04..8465145 100644
--- a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
+++ b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
@@ -22,6 +22,7 @@ import java.net.URLClassLoader
 import java.nio.file.{Files, Paths}
 
 import org.apache.spark.SparkConf
+import org.apache.spark.repl.SparkILoop
 import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion
 import org.apache.zeppelin.interpreter.util.InterpreterOutputStream
 import org.apache.zeppelin.interpreter.{InterpreterContext, InterpreterResult}
@@ -43,7 +44,7 @@ class SparkScala211Interpreter(override val conf: SparkConf,
 
   lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
 
-  private var sparkILoop: ILoop = _
+  private var sparkILoop: SparkILoop = _
 
   override val interpreterOutput = new InterpreterOutputStream(LOGGER)
 
@@ -74,7 +75,7 @@ class SparkScala211Interpreter(override val conf: SparkConf,
     } else {
       new JPrintWriter(Console.out, true)
     }
-    sparkILoop = new ILoop(None, replOut)
+    sparkILoop = new SparkILoop(None, replOut)
     sparkILoop.settings = settings
     sparkILoop.createInterpreter()