You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by mo...@apache.org on 2017/09/09 01:09:17 UTC
zeppelin git commit: ZEPPELIN-204 make scala code completion work
again
Repository: zeppelin
Updated Branches:
refs/heads/master bf7ada0e2 -> f46244d32
ZEPPELIN-204 make scala code completion work again
### What is this PR for?
Spark Scala interpreter didn't show any useful completion proposals (besides some keywords)
### What type of PR is it?
Bug Fix
### Todos
### What is the Jira issue?
https://issues.apache.org/jira/browse/ZEPPELIN-204
### How should this be tested?
Open a note with the Spark interpreter. Write some (multiline) code. Open code completion with Ctrl+.
There are also some additional unit tests...
### Screenshots (if appropriate)
### Questions:
* Do the license files need an update? No
* Is there breaking changes for older versions? No
* Does this need documentation? No
Author: Pascal Pellmont <gi...@ppo2.ch>
Closes #2563 from pellmont/master and squashes the following commits:
5f8d07c [Pascal Pellmont] ZEPPELIN-204 fixed completion for scala 2.11 < 2.11.8
ba52604 [Pascal Pellmont] ZEPPELIN-204 different treatment for scala 2.10
4b594da [Pascal Pellmont] Merge branch 'master' of git@github.com:apache/zeppelin.git
94858b5 [Pascal Pellmont] ZEPPELIN-204 make scala code completion work again
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/f46244d3
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/f46244d3
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/f46244d3
Branch: refs/heads/master
Commit: f46244d32049acc3857762de58f34f7f15ba7049
Parents: bf7ada0
Author: Pascal Pellmont <gi...@ppo2.ch>
Authored: Thu Sep 7 20:48:29 2017 +0200
Committer: Lee moon soo <mo...@apache.org>
Committed: Fri Sep 8 18:08:52 2017 -0700
----------------------------------------------------------------------
.../apache/zeppelin/spark/SparkInterpreter.java | 38 ++++++++-----
.../java/org/apache/zeppelin/spark/Utils.java | 57 ++++++++++++++++----
.../zeppelin/spark/SparkInterpreterTest.java | 16 ++++++
3 files changed, 88 insertions(+), 23 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f46244d3/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 670314b..fd12a72 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -1111,23 +1111,33 @@ public class SparkInterpreter extends Interpreter {
if (buf.length() < cursor) {
cursor = buf.length();
}
- String completionText = getCompletionTargetString(buf, cursor);
- if (completionText == null) {
- completionText = "";
- cursor = completionText.length();
- }
ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
- Candidates ret = c.complete(completionText, cursor);
-
- List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
- List<InterpreterCompletion> completions = new LinkedList<>();
-
- for (String candidate : candidates) {
- completions.add(new InterpreterCompletion(candidate, candidate, StringUtils.EMPTY));
+
+ if (Utils.isScala2_10() || !Utils.isCompilerAboveScala2_11_7()) {
+ String singleToken = getCompletionTargetString(buf, cursor);
+ Candidates ret = c.complete(singleToken, singleToken.length());
+
+ List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+ List<InterpreterCompletion> completions = new LinkedList<>();
+
+ for (String candidate : candidates) {
+ completions.add(new InterpreterCompletion(candidate, candidate, StringUtils.EMPTY));
+ }
+
+ return completions;
+ } else {
+ Candidates ret = c.complete(buf, cursor);
+
+ List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+ List<InterpreterCompletion> completions = new LinkedList<>();
+
+ for (String candidate : candidates) {
+ completions.add(new InterpreterCompletion(candidate, candidate, StringUtils.EMPTY));
+ }
+
+ return completions;
}
-
- return completions;
}
private String getCompletionTargetString(String text, int cursor) {
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f46244d3/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
index 6448c97..82bf210 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
@@ -24,18 +24,22 @@ import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
/**
* Utility and helper functions for the Spark Interpreter
*/
class Utils {
public static Logger logger = LoggerFactory.getLogger(Utils.class);
+ private static final String SCALA_COMPILER_VERSION = evaluateScalaCompilerVersion();
static Object invokeMethod(Object o, String name) {
return invokeMethod(o, name, new Class[]{}, new Object[]{});
}
- static Object invokeMethod(Object o, String name, Class[] argTypes, Object[] params) {
+ static Object invokeMethod(Object o, String name, Class<?>[] argTypes, Object[] params) {
try {
return o.getClass().getMethod(name, argTypes).invoke(o, params);
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
@@ -44,7 +48,7 @@ class Utils {
return null;
}
- static Object invokeStaticMethod(Class c, String name, Class[] argTypes, Object[] params) {
+ static Object invokeStaticMethod(Class<?> c, String name, Class<?>[] argTypes, Object[] params) {
try {
return c.getMethod(name, argTypes).invoke(null, params);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
@@ -53,17 +57,17 @@ class Utils {
return null;
}
- static Object invokeStaticMethod(Class c, String name) {
+ static Object invokeStaticMethod(Class<?> c, String name) {
return invokeStaticMethod(c, name, new Class[]{}, new Object[]{});
}
- static Class findClass(String name) {
+ static Class<?> findClass(String name) {
return findClass(name, false);
}
- static Class findClass(String name, boolean silence) {
+ static Class<?> findClass(String name, boolean silence) {
try {
- return Utils.class.forName(name);
+ return Class.forName(name);
} catch (ClassNotFoundException e) {
if (!silence) {
logger.error(e.getMessage(), e);
@@ -72,7 +76,7 @@ class Utils {
}
}
- static Object instantiateClass(String name, Class[] argTypes, Object[] params) {
+ static Object instantiateClass(String name, Class<?>[] argTypes, Object[] params) {
try {
Constructor<?> constructor = Utils.class.getClassLoader()
.loadClass(name).getConstructor(argTypes);
@@ -87,7 +91,7 @@ class Utils {
// function works after intp is initialized
static boolean isScala2_10() {
try {
- Utils.class.forName("org.apache.spark.repl.SparkIMain");
+ Class.forName("org.apache.spark.repl.SparkIMain");
return true;
} catch (ClassNotFoundException e) {
return false;
@@ -99,10 +103,45 @@ class Utils {
static boolean isScala2_11() {
return !isScala2_10();
}
+
+ static boolean isCompilerAboveScala2_11_7() {
+ if (isScala2_10() || SCALA_COMPILER_VERSION == null) {
+ return false;
+ }
+ Pattern p = Pattern.compile("([0-9]+)[.]([0-9]+)[.]([0-9]+)");
+ Matcher m = p.matcher(SCALA_COMPILER_VERSION);
+ if (m.matches()) {
+ int major = Integer.parseInt(m.group(1));
+ int minor = Integer.parseInt(m.group(2));
+ int bugfix = Integer.parseInt(m.group(3));
+ return (major > 2 || (major == 2 && minor > 11) || (major == 2 && minor == 11 && bugfix > 7));
+ }
+ return false;
+ }
+
+ private static String evaluateScalaCompilerVersion() {
+ String version = null;
+ try {
+ Properties p = new Properties();
+ Class<?> completionClass = findClass("scala.tools.nsc.interpreter.JLineCompletion");
+ if (completionClass != null) {
+ try (java.io.InputStream in = completionClass.getClass()
+ .getResourceAsStream("/compiler.properties")) {
+ p.load(in);
+ version = p.getProperty("version.number");
+ } catch (java.io.IOException e) {
+ logger.error("Failed to evaluate Scala compiler version", e);
+ }
+ }
+ } catch (RuntimeException e) {
+ logger.error("Failed to evaluate Scala compiler version", e);
+ }
+ return version;
+ }
static boolean isSpark2() {
try {
- Utils.class.forName("org.apache.spark.sql.SparkSession");
+ Class.forName("org.apache.spark.sql.SparkSession");
return true;
} catch (ClassNotFoundException e) {
return false;
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f46244d3/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index a939942..ece292b 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -313,6 +313,22 @@ public class SparkInterpreterTest {
}
@Test
+ public void testMultilineCompletion() {
+ String buf = "val x = 1\nsc.";
+ List<InterpreterCompletion> completions = repl.completion(buf, buf.length(), null);
+ assertTrue(completions.size() > 0);
+ }
+
+ @Test
+ public void testMultilineCompletionNewVar() {
+ Assume.assumeFalse("this feature does not work with scala 2.10", Utils.isScala2_10());
+ Assume.assumeTrue("This feature does not work with scala < 2.11.8", Utils.isCompilerAboveScala2_11_7());
+ String buf = "val x = sc\nx.";
+ List<InterpreterCompletion> completions = repl.completion(buf, buf.length(), null);
+ assertTrue(completions.size() > 0);
+ }
+
+ @Test
public void testParagraphUrls() {
String paraId = "test_para_job_url";
InterpreterContext intpCtx = new InterpreterContext("note", paraId, null, "title", "text",