You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by mo...@apache.org on 2015/06/20 23:59:36 UTC

incubator-zeppelin git commit: ZEPPELIN-119: SparkILoop loadFiles is called before binder initialization

Repository: incubator-zeppelin
Updated Branches:
  refs/heads/master bcbc8537b -> c04440d22


ZEPPELIN-119: SparkILoop loadFiles is called before binder initialization

With this PR, SparkILoop.loadFiles is invoked _after_ the 'binder' object is initialized. This way you can pass external init scripts to Spark to customize Spark's contexts.

Author: Luca Rosellini <lu...@gmail.com>

This patch had conflicts when merged, resolved by
Committer: Lee moon soo <mo...@apache.org>

Closes #107 from lucarosellini/bugfix/zeppelin-119 and squashes the following commits:

04fcd23 [Luca Rosellini] fixed ZEPPELIN-119


Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/c04440d2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/c04440d2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/c04440d2

Branch: refs/heads/master
Commit: c04440d22a36703558e9ced8fa2854ffa437782e
Parents: bcbc853
Author: Luca Rosellini <lu...@gmail.com>
Authored: Thu Jun 18 17:07:31 2015 +0200
Committer: Lee moon soo <mo...@apache.org>
Committed: Sat Jun 20 14:59:26 2015 -0700

----------------------------------------------------------------------
 .../apache/zeppelin/spark/SparkInterpreter.java | 37 ++++++++++----------
 1 file changed, 18 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/c04440d2/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 1c4c5e7..b2aecb9 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -411,25 +411,6 @@ public class SparkInterpreter extends Interpreter {
     z = new ZeppelinContext(sc, sqlc, null, dep, printStream,
         Integer.parseInt(getProperty("zeppelin.spark.maxResult")));
 
-    try {
-      if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
-        Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      } else if (sc.version().startsWith("1.3")) {
-        Method loadFiles = this.interpreter.getClass().getMethod(
-            "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      } else if (sc.version().startsWith("1.4")) {
-        Method loadFiles = this.interpreter.getClass().getMethod(
-            "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      }
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      throw new InterpreterException(e);
-    }
-
-
     intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
     binder = (Map<String, Object>) getValue("_binder");
     binder.put("sc", sc);
@@ -469,6 +450,24 @@ public class SparkInterpreter extends Interpreter {
     intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
             Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
 
+    try {
+      if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
+        Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
+        loadFiles.invoke(this.interpreter, settings);
+      } else if (sc.version().startsWith("1.3")) {
+        Method loadFiles = this.interpreter.getClass().getMethod(
+                "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
+        loadFiles.invoke(this.interpreter, settings);
+      } else if (sc.version().startsWith("1.4")) {
+        Method loadFiles = this.interpreter.getClass().getMethod(
+                "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
+        loadFiles.invoke(this.interpreter, settings);
+      }
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+            | IllegalArgumentException | InvocationTargetException e) {
+      throw new InterpreterException(e);
+    }
+
     // add jar
     if (depInterpreter != null) {
       DependencyContext depc = depInterpreter.getDependencyContext();