You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by mo...@apache.org on 2017/01/02 20:28:12 UTC
zeppelin git commit: [ZEPPELIN-1758] support livy pyspark
Interpreter's magic function
Repository: zeppelin
Updated Branches:
refs/heads/master 1e38a9b25 -> 355387a72
[ZEPPELIN-1758] support livy pyspark Interpreter's magic function
### What is this PR for?
support livy pyspark Interpreter's magic function
### What type of PR is it?
Improvement
### Todos
* [ ] - Task
### What is the Jira issue?
https://issues.apache.org/jira/browse/ZEPPELIN-1758
### How should this be tested?
test code
```
%livy.pyspark
t = [{"name":"userA", "role":"roleA"}, {"name":"userB", "role":"roleB"}, {"name":"userC"}]
%table t
```
```
%livy.sparkr
plot(iris, col = heat.colors(3))
```
### Screenshots (if appropriate)
![magic](https://cloud.githubusercontent.com/assets/16571121/21447042/c30933e2-c911-11e6-950b-b71864ecf0e7.png)
### Questions:
* Do the license files need to be updated? no
* Are there breaking changes for older versions? no
* Does this need documentation? no
Author: purechoc <pu...@ncsoft.com>
Author: chrischo <pu...@gmail.com>
Closes #1729 from purechoc/support-livy-magic and squashes the following commits:
c4a0952 [chrischo] fixed variable name
2cbc782 [chrischo] fixed variable name
e3dcf42 [purechoc] fixed wrong code
5f26592 [purechoc] fixed wrong code
641f482 [purechoc] change to using StringUtils
0699e84 [purechoc] change to using StringBuilder
1cdd7ff [purechoc] add test in LivyInterpreterIT
56f1b78 [purechoc] refactoring
2af35b9 [purechoc] Merge remote-tracking branch 'upstream2/master' into support-livy-magic
c33ac76 [purechoc] fixed some wrong code
5fe38d2 [purechoc] support table magic
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/355387a7
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/355387a7
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/355387a7
Branch: refs/heads/master
Commit: 355387a72741c657960293dddebd1157f382ba3c
Parents: 1e38a9b
Author: purechoc <pu...@ncsoft.com>
Authored: Fri Dec 23 19:35:55 2016 +0900
Committer: Lee moon soo <mo...@apache.org>
Committed: Mon Jan 2 12:28:08 2017 -0800
----------------------------------------------------------------------
.../zeppelin/livy/BaseLivyInterprereter.java | 38 ++++++++++++++++++--
.../apache/zeppelin/livy/LivyInterpreterIT.java | 9 +++++
2 files changed, 45 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/355387a7/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterprereter.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterprereter.java b/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterprereter.java
index a8e3127..0c8c8e2 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterprereter.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterprereter.java
@@ -229,7 +229,31 @@ public abstract class BaseLivyInterprereter extends Interpreter {
} else {
//TODO(zjffdu) support other types of data (like json, image and etc)
String result = stmtInfo.output.data.plain_text;
- if (result != null) {
+
+ // check table magic result first
+ if (stmtInfo.output.data.application_livy_table_json != null) {
+ StringBuilder outputBuilder = new StringBuilder();
+ boolean notFirstColumn = false;
+
+ for (Map header : stmtInfo.output.data.application_livy_table_json.headers) {
+ if (notFirstColumn) {
+ outputBuilder.append("\t");
+ }
+ outputBuilder.append(header.get("name"));
+ notFirstColumn = true;
+ }
+
+ outputBuilder.append("\n");
+ for (List<Object> row : stmtInfo.output.data.application_livy_table_json.records) {
+ outputBuilder.append(StringUtils.join(row, "\t"));
+ outputBuilder.append("\n");
+ }
+ return new InterpreterResult(InterpreterResult.Code.SUCCESS,
+ InterpreterResult.Type.TABLE, outputBuilder.toString());
+ } else if (stmtInfo.output.data.image_png != null) {
+ return new InterpreterResult(InterpreterResult.Code.SUCCESS,
+ InterpreterResult.Type.IMG, (String) stmtInfo.output.data.image_png);
+ } else if (result != null) {
result = result.trim();
if (result.startsWith("<link")
|| result.startsWith("<script")
@@ -238,6 +262,7 @@ public abstract class BaseLivyInterprereter extends Interpreter {
result = "%html " + result;
}
}
+
if (displayAppInfo) {
//TODO(zjffdu), use multiple InterpreterResult to display appInfo
StringBuilder outputBuilder = new StringBuilder();
@@ -439,6 +464,7 @@ public abstract class BaseLivyInterprereter extends Interpreter {
public String ename;
public String evalue;
public Object traceback;
+ public TableMagic tableMagic;
public boolean isError() {
return status.equals("error");
@@ -456,7 +482,15 @@ public abstract class BaseLivyInterprereter extends Interpreter {
@SerializedName("application/json")
public String application_json;
@SerializedName("application/vnd.livy.table.v1+json")
- public String application_livy_table_json;
+ public TableMagic application_livy_table_json;
+ }
+
+ private static class TableMagic {
+ @SerializedName("headers")
+ List<Map> headers;
+
+ @SerializedName("data")
+ List<List> records;
}
}
}
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/355387a7/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
index 0173f1d..8ca8842 100644
--- a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
@@ -294,6 +294,15 @@ public class LivyInterpreterIT {
assertEquals(1, result.message().size());
assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]"));
+ // test magic api
+ pysparkInterpreter.interpret("t = [{\"name\":\"userA\", \"role\":\"roleA\"},"
+ + "{\"name\":\"userB\", \"role\":\"roleB\"}]", context);
+ result = pysparkInterpreter.interpret("%table t", context);
+ assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+ assertEquals(1, result.message().size());
+ assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
+ assertTrue(result.message().get(0).getData().contains("userA"));
+
// error
result = pysparkInterpreter.interpret("print(a)", context);
assertEquals(InterpreterResult.Code.ERROR, result.code());