Posted to commits@zeppelin.apache.org by zj...@apache.org on 2017/04/03 08:52:47 UTC

zeppelin git commit: ZEPPELIN-2324. Add property zeppelin.spark.enableSupportedVersionCheck for trying new spark version

Repository: zeppelin
Updated Branches:
  refs/heads/master 20b72758b -> e7d41c349


ZEPPELIN-2324. Add property zeppelin.spark.enableSupportedVersionCheck for trying new spark version

### What is this PR for?
Currently, every time I want to try a new Spark version, I have to change `SparkVersion.java` and rebuild. That is not convenient, so I'd like to add the property `zeppelin.spark.enableSupportedVersionCheck` to the Spark interpreter, so that I can try a new Spark version by setting this property to false. Of course, this is only intended for Zeppelin developers.
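
In short, the check reduces to a sketch like the one below. It mirrors the logic this patch adds (the wrapper class is illustrative; `SparkVersion.isUnsupportedVersion()` is the existing check in `SparkVersion.java`, and `property` is the interpreter's `Properties`):

```java
import java.util.Properties;

class SupportedVersionCheckSketch {
  // Illustrative helper mirroring the isUnsupportedSparkVersion() method added by this patch.
  static boolean isUnsupportedSparkVersion(Properties property, SparkVersion sparkVersion) {
    // Read the flag once when the interpreter is opened; it defaults to true,
    // so existing behaviour is unchanged for regular users.
    boolean enableSupportedVersionCheck = Boolean.parseBoolean(
        property.getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));
    // A version is only reported as unsupported while the check is enabled,
    // so a developer can set the flag to false to try a new Spark version.
    return enableSupportedVersionCheck && sparkVersion.isUnsupportedVersion();
  }
}
```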

### What type of PR is it?
[Improvement]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-2324

### How should this be tested?
Verify it against the Spark master branch.

### Screenshots (if appropriate)

### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zj...@apache.org>

Closes #2197 from zjffdu/ZEPPELIN-2324 and squashes the following commits:

f821839 [Jeff Zhang] address comments
4193ee3 [Jeff Zhang] ZEPPELIN-2324. Add property zeppelin.spark.unSupportedVersionCheck for trying new spark version


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/e7d41c34
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/e7d41c34
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/e7d41c34

Branch: refs/heads/master
Commit: e7d41c34973d0370f41f213d6d75001682f80786
Parents: 20b7275
Author: Jeff Zhang <zj...@apache.org>
Authored: Thu Mar 30 13:46:49 2017 +0800
Committer: Jeff Zhang <zj...@apache.org>
Committed: Mon Apr 3 16:52:50 2017 +0800

----------------------------------------------------------------------
 docs/interpreter/spark.md                               |  5 +++++
 .../org/apache/zeppelin/spark/PySparkInterpreter.java   |  5 ++---
 .../org/apache/zeppelin/spark/SparkInterpreter.java     | 12 +++++++++---
 .../org/apache/zeppelin/spark/SparkRInterpreter.java    |  4 ++++
 .../org/apache/zeppelin/spark/SparkSqlInterpreter.java  |  2 +-
 spark/src/main/resources/interpreter-setting.json       |  6 ++++++
 .../src/main/sparkr-resources/interpreter-setting.json  |  6 ++++++
 7 files changed, 33 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/docs/interpreter/spark.md
----------------------------------------------------------------------
diff --git a/docs/interpreter/spark.md b/docs/interpreter/spark.md
index a19eda2..59b3430 100644
--- a/docs/interpreter/spark.md
+++ b/docs/interpreter/spark.md
@@ -140,6 +140,11 @@ You can also set other Spark properties which are not listed in the table. For a
     <td>true</td>
     <td>Import implicits, UDF collection, and sql if set true.</td>
   </tr>
+  <tr>
+    <td>zeppelin.spark.enableSupportedVersionCheck</td>
+    <td>true</td>
+    <td>Do not change - developer only setting, not for production use</td>
+  </tr>
 </table>
 
 Without any configuration, Spark interpreter works out of box in local mode. But if you want to connect to your Spark cluster, you'll need to follow below two simple steps.

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
index db52a53..6e957ed 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
@@ -337,7 +337,7 @@ public class PySparkInterpreter extends Interpreter implements ExecuteResultHand
   public InterpreterResult interpret(String st, InterpreterContext context) {
     SparkInterpreter sparkInterpreter = getSparkInterpreter();
     sparkInterpreter.populateSparkWebUrl(context);
-    if (sparkInterpreter.getSparkVersion().isUnsupportedVersion()) {
+    if (sparkInterpreter.isUnsupportedSparkVersion()) {
       return new InterpreterResult(Code.ERROR, "Spark "
           + sparkInterpreter.getSparkVersion().toString() + " is not supported");
     }
@@ -466,8 +466,7 @@ public class PySparkInterpreter extends Interpreter implements ExecuteResultHand
 
     //start code for completion
     SparkInterpreter sparkInterpreter = getSparkInterpreter();
-    if (sparkInterpreter.getSparkVersion().isUnsupportedVersion() == false
-            && pythonscriptRunning == false) {
+    if (sparkInterpreter.isUnsupportedSparkVersion() || pythonscriptRunning == false) {
       return new LinkedList<>();
     }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index f1c5676..6a2f3cc 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -43,7 +43,6 @@ import org.apache.spark.repl.SparkILoop;
 import org.apache.spark.scheduler.ActiveJob;
 import org.apache.spark.scheduler.DAGScheduler;
 import org.apache.spark.scheduler.Pool;
-import org.apache.spark.scheduler.SparkListenerApplicationEnd;
 import org.apache.spark.scheduler.SparkListenerJobStart;
 import org.apache.spark.sql.SQLContext;
 import org.apache.spark.ui.SparkUI;
@@ -128,7 +127,7 @@ public class SparkInterpreter extends Interpreter {
   private static File outputDir;          // class outputdir for scala 2.11
   private Object classServer;      // classserver for scala 2.11
   private JavaSparkContext jsc;
-
+  private boolean enableSupportedVersionCheck;
 
   public SparkInterpreter(Properties property) {
     super(property);
@@ -609,6 +608,9 @@ public class SparkInterpreter extends Interpreter {
 
   @Override
   public void open() {
+    this.enableSupportedVersionCheck = java.lang.Boolean.parseBoolean(
+            property.getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));
+
     // set properties and do login before creating any spark stuff for secured cluster
     if (isYarnMode()) {
       System.setProperty("SPARK_YARN_MODE", "true");
@@ -1153,12 +1155,16 @@ public class SparkInterpreter extends Interpreter {
     return obj;
   }
 
+  boolean isUnsupportedSparkVersion() {
+    return enableSupportedVersionCheck  && sparkVersion.isUnsupportedVersion();
+  }
+
   /**
    * Interpret a single line.
    */
   @Override
   public InterpreterResult interpret(String line, InterpreterContext context) {
-    if (sparkVersion.isUnsupportedVersion()) {
+    if (isUnsupportedSparkVersion()) {
       return new InterpreterResult(Code.ERROR, "Spark " + sparkVersion.toString()
           + " is not supported");
     }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/java/org/apache/zeppelin/spark/SparkRInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkRInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkRInterpreter.java
index 75687d0..c6b0796 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkRInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkRInterpreter.java
@@ -108,6 +108,10 @@ public class SparkRInterpreter extends Interpreter {
 
     SparkInterpreter sparkInterpreter = getSparkInterpreter();
     sparkInterpreter.populateSparkWebUrl(interpreterContext);
+    if (sparkInterpreter.isUnsupportedSparkVersion()) {
+      return new InterpreterResult(InterpreterResult.Code.ERROR, "Spark "
+          + sparkInterpreter.getSparkVersion().toString() + " is not supported");
+    }
 
     String jobGroup = Utils.buildJobGroupId(interpreterContext);
     sparkInterpreter.getSparkContext().setJobGroup(jobGroup, "Zeppelin", false);

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
index 1d5282f..d42eb50 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
@@ -87,7 +87,7 @@ public class SparkSqlInterpreter extends Interpreter {
     SQLContext sqlc = null;
     SparkInterpreter sparkInterpreter = getSparkInterpreter();
 
-    if (sparkInterpreter.getSparkVersion().isUnsupportedVersion()) {
+    if (sparkInterpreter.isUnsupportedSparkVersion()) {
       return new InterpreterResult(Code.ERROR, "Spark "
           + sparkInterpreter.getSparkVersion().toString() + " is not supported");
     }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/resources/interpreter-setting.json
----------------------------------------------------------------------
diff --git a/spark/src/main/resources/interpreter-setting.json b/spark/src/main/resources/interpreter-setting.json
index 2b78d16..c8acc2f 100644
--- a/spark/src/main/resources/interpreter-setting.json
+++ b/spark/src/main/resources/interpreter-setting.json
@@ -53,6 +53,12 @@
         "propertyName": "spark.master",
         "defaultValue": "local[*]",
         "description": "Spark master uri. ex) spark://masterhost:7077"
+      },
+      "zeppelin.spark.unSupportedVersionCheck": {
+        "envName": null,
+        "propertyName": "zeppelin.spark.enableSupportedVersionCheck",
+        "defaultValue": "true",
+        "description": "Do not change - developer only setting, not for production use"
       }
     },
     "editor": {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e7d41c34/spark/src/main/sparkr-resources/interpreter-setting.json
----------------------------------------------------------------------
diff --git a/spark/src/main/sparkr-resources/interpreter-setting.json b/spark/src/main/sparkr-resources/interpreter-setting.json
index 338c861..6953b20 100644
--- a/spark/src/main/sparkr-resources/interpreter-setting.json
+++ b/spark/src/main/sparkr-resources/interpreter-setting.json
@@ -53,6 +53,12 @@
         "propertyName": "spark.master",
         "defaultValue": "local[*]",
         "description": "Spark master uri. ex) spark://masterhost:7077"
+      },
+      "zeppelin.spark.unSupportedVersionCheck": {
+        "envName": null,
+        "propertyName": "zeppelin.spark.enableSupportedVersionCheck",
+        "defaultValue": "true",
+        "description": "Do not change - developer only setting, not for production use"
       }
     },
     "editor": {