You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by zj...@apache.org on 2018/08/03 00:53:22 UTC
zeppelin git commit: ZEPPELIN-3679. Provide option to allow hide
spark ui in frontend
Repository: zeppelin
Updated Branches:
refs/heads/master c95d89684 -> 1dcfb9d26
ZEPPELIN-3679. Provide option to allow hide spark ui in frontend
### What is this PR for?
Introducing a new property `zeppelin.spark.ui.hidden` to control whether to hide the Spark UI in the frontend. By default it is false.
### What type of PR is it?
[Improvement]
### Todos
* [ ] - Task
### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3679
### How should this be tested?
* Unit test is added
### Screenshots (if appropriate)
### Questions:
* Does the licenses files need update? No
* Is there breaking changes for older versions? No
* Does this need documentation? No
Author: Jeff Zhang <zj...@apache.org>
Closes #3109 from zjffdu/ZEPPELIN-3679 and squashes the following commits:
9c00a6dce [Jeff Zhang] ZEPPELIN-3679. Provide option to allow hide spark ui in frontend
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/1dcfb9d2
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/1dcfb9d2
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/1dcfb9d2
Branch: refs/heads/master
Commit: 1dcfb9d264a7cee2d809f475540f5d24a767353e
Parents: c95d896
Author: Jeff Zhang <zj...@apache.org>
Authored: Thu Aug 2 10:31:57 2018 +0800
Committer: Jeff Zhang <zj...@apache.org>
Committed: Fri Aug 3 08:53:17 2018 +0800
----------------------------------------------------------------------
.../zeppelin/spark/NewSparkInterpreter.java | 2 +-
.../zeppelin/spark/OldSparkInterpreter.java | 2 +-
.../src/main/resources/interpreter-setting.json | 7 +++++
.../zeppelin/spark/NewSparkInterpreterTest.java | 30 +++++++++++++++++++-
.../apache/zeppelin/spark/SparkShimsTest.java | 6 ++--
.../org/apache/zeppelin/spark/SparkShims.java | 23 +++++++++++----
.../org/apache/zeppelin/spark/Spark1Shims.java | 10 +++++--
.../org/apache/zeppelin/spark/Spark2Shims.java | 10 +++++--
8 files changed, 75 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
index 9ee504a..23e6dad 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
@@ -120,7 +120,7 @@ public class NewSparkInterpreter extends AbstractSparkInterpreter {
if (!StringUtils.isBlank(sparkUrlProp)) {
sparkUrl = sparkUrlProp;
}
- sparkShims = SparkShims.getInstance(sc.version());
+ sparkShims = SparkShims.getInstance(sc.version(), getProperties());
sparkShims.setupSparkListener(sc.master(), sparkUrl, InterpreterContext.get());
z = new SparkZeppelinContext(sc, sparkShims, hooks,
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/interpreter/src/main/java/org/apache/zeppelin/spark/OldSparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/OldSparkInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/OldSparkInterpreter.java
index 0366e3b..6f157a0 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/OldSparkInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/OldSparkInterpreter.java
@@ -707,7 +707,7 @@ public class OldSparkInterpreter extends AbstractSparkInterpreter {
dep = getDependencyResolver();
hooks = getInterpreterGroup().getInterpreterHookRegistry();
sparkUrl = getSparkUIUrl();
- sparkShims = SparkShims.getInstance(sc.version());
+ sparkShims = SparkShims.getInstance(sc.version(), getProperties());
sparkShims.setupSparkListener(sc.master(), sparkUrl, InterpreterContext.get());
numReferenceOfSparkContext.incrementAndGet();
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/interpreter/src/main/resources/interpreter-setting.json
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/main/resources/interpreter-setting.json b/spark/interpreter/src/main/resources/interpreter-setting.json
index 5746754..60224ce 100644
--- a/spark/interpreter/src/main/resources/interpreter-setting.json
+++ b/spark/interpreter/src/main/resources/interpreter-setting.json
@@ -81,6 +81,13 @@
"defaultValue": true,
"description": "Whether use new spark interpreter implementation",
"type": "checkbox"
+ },
+ "zeppelin.spark.ui.hidden": {
+ "envName": null,
+ "propertyName": "zeppelin.spark.ui.hidden",
+ "defaultValue": false,
+ "description": "Whether to hide spark ui in zeppelin ui",
+ "type": "checkbox"
}
},
"editor": {
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
index 9dc9d8f..ea19866 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
@@ -53,6 +53,7 @@ import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
@@ -468,8 +469,9 @@ public class NewSparkInterpreterTest {
assertEquals(null, interpreter.getSparkContext().getLocalProperty("spark.scheduler.pool"));
}
+ // spark.ui.enabled: false
@Test
- public void testDisableSparkUI() throws InterpreterException {
+ public void testDisableSparkUI_1() throws InterpreterException {
Properties properties = new Properties();
properties.setProperty("spark.master", "local");
properties.setProperty("spark.app.name", "test");
@@ -492,6 +494,31 @@ public class NewSparkInterpreterTest {
verify(mockRemoteEventClient, never()).onParaInfosReceived(any(Map.class));
}
+ // zeppelin.spark.ui.hidden: true
+ @Test
+ public void testDisableSparkUI_2() throws InterpreterException {
+ Properties properties = new Properties();
+ properties.setProperty("spark.master", "local");
+ properties.setProperty("spark.app.name", "test");
+ properties.setProperty("zeppelin.spark.maxResult", "100");
+ properties.setProperty("zeppelin.spark.test", "true");
+ properties.setProperty("zeppelin.spark.useNew", "true");
+ properties.setProperty("zeppelin.spark.ui.hidden", "true");
+
+ interpreter = new SparkInterpreter(properties);
+ assertTrue(interpreter.getDelegation() instanceof NewSparkInterpreter);
+ interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
+ InterpreterContext.set(getInterpreterContext());
+ interpreter.open();
+
+ InterpreterContext context = getInterpreterContext();
+ InterpreterResult result = interpreter.interpret("sc.range(1, 10).sum", context);
+ assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
+ // spark job url is not sent
+ verify(mockRemoteEventClient, never()).onParaInfosReceived(any(Map.class));
+ }
+
@After
public void tearDown() throws InterpreterException {
if (this.interpreter != null) {
@@ -500,6 +527,7 @@ public class NewSparkInterpreterTest {
if (this.depInterpreter != null) {
this.depInterpreter.close();
}
+ SparkShims.reset();
}
private InterpreterContext getInterpreterContext() {
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
index fd47ce2..48d0055 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
@@ -89,7 +89,7 @@ public class SparkShimsTest {
@Test
public void checkYarnVersionTest() {
SparkShims sparkShims =
- new SparkShims() {
+ new SparkShims(new Properties()) {
@Override
public void setupSparkListener(String master,
String sparkWebUrl,
@@ -121,9 +121,9 @@ public class SparkShimsTest {
when(mockContext.getIntpEventClient()).thenReturn(mockIntpEventClient);
doNothing().when(mockIntpEventClient).onParaInfosReceived(argumentCaptor.capture());
try {
- sparkShims = SparkShims.getInstance(SparkVersion.SPARK_2_0_0.toString());
+ sparkShims = SparkShims.getInstance(SparkVersion.SPARK_2_0_0.toString(), new Properties());
} catch (Throwable ignore) {
- sparkShims = SparkShims.getInstance(SparkVersion.SPARK_1_6_0.toString());
+ sparkShims = SparkShims.getInstance(SparkVersion.SPARK_1_6_0.toString(), new Properties());
}
}
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
----------------------------------------------------------------------
diff --git a/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java b/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
index 1287c57..efd65fc 100644
--- a/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
+++ b/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
@@ -17,6 +17,7 @@
package org.apache.zeppelin.spark;
+import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
import org.apache.zeppelin.interpreter.InterpreterContext;
@@ -48,7 +49,14 @@ public abstract class SparkShims {
private static SparkShims sparkShims;
- private static SparkShims loadShims(String sparkVersion) throws ReflectiveOperationException {
+ protected Properties properties;
+
+ public SparkShims(Properties properties) {
+ this.properties = properties;
+ }
+
+ private static SparkShims loadShims(String sparkVersion, Properties properties)
+ throws ReflectiveOperationException {
Class<?> sparkShimsClass;
if ("2".equals(sparkVersion)) {
LOGGER.info("Initializing shims for Spark 2.x");
@@ -58,15 +66,15 @@ public abstract class SparkShims {
sparkShimsClass = Class.forName("org.apache.zeppelin.spark.Spark1Shims");
}
- Constructor c = sparkShimsClass.getConstructor();
- return (SparkShims) c.newInstance();
+ Constructor c = sparkShimsClass.getConstructor(Properties.class);
+ return (SparkShims) c.newInstance(properties);
}
- public static SparkShims getInstance(String sparkVersion) {
+ public static SparkShims getInstance(String sparkVersion, Properties properties) {
if (sparkShims == null) {
String sparkMajorVersion = getSparkMajorVersion(sparkVersion);
try {
- sparkShims = loadShims(sparkMajorVersion);
+ sparkShims = loadShims(sparkMajorVersion, properties);
} catch (ReflectiveOperationException e) {
throw new RuntimeException(e);
}
@@ -137,4 +145,9 @@ public abstract class SparkShims {
|| (VersionUtil.compareVersions(HADOOP_VERSION_3_0_0_ALPHA4, version) <= 0)
|| (VersionUtil.compareVersions(HADOOP_VERSION_3_0_0, version) <= 0);
}
+
+ @VisibleForTesting
+ public static void reset() {
+ sparkShims = null;
+ }
}
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
----------------------------------------------------------------------
diff --git a/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java b/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
index 3981204..c13ff9a 100644
--- a/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
+++ b/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
@@ -28,9 +28,14 @@ import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.ResultMessages;
import java.util.List;
+import java.util.Properties;
public class Spark1Shims extends SparkShims {
+ public Spark1Shims(Properties properties) {
+ super(properties);
+ }
+
public void setupSparkListener(final String master,
final String sparkWebUrl,
final InterpreterContext context) {
@@ -38,7 +43,8 @@ public class Spark1Shims extends SparkShims {
sc.addSparkListener(new JobProgressListener(sc.getConf()) {
@Override
public void onJobStart(SparkListenerJobStart jobStart) {
- if (sc.getConf().getBoolean("spark.ui.enabled", true)) {
+ if (sc.getConf().getBoolean("spark.ui.enabled", true) &&
+ !Boolean.parseBoolean(properties.getProperty("zeppelin.spark.ui.hidden", "false"))) {
buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), context);
}
}
@@ -59,7 +65,7 @@ public class Spark1Shims extends SparkShims {
for (Row row : rows) {
for (int i = 0; i < row.size(); ++i) {
msg.append(row.get(i));
- if (i != row.size() -1) {
+ if (i != row.size() - 1) {
msg.append("\t");
}
}
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1dcfb9d2/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
----------------------------------------------------------------------
diff --git a/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java b/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
index 5f0cf87..5043786 100644
--- a/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
+++ b/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
@@ -24,14 +24,18 @@ import org.apache.spark.scheduler.SparkListener;
import org.apache.spark.scheduler.SparkListenerJobStart;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
-import org.apache.spark.sql.SparkSession;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.ResultMessages;
import java.util.List;
+import java.util.Properties;
public class Spark2Shims extends SparkShims {
+ public Spark2Shims(Properties properties) {
+ super(properties);
+ }
+
public void setupSparkListener(final String master,
final String sparkWebUrl,
final InterpreterContext context) {
@@ -39,7 +43,9 @@ public class Spark2Shims extends SparkShims {
sc.addSparkListener(new SparkListener() {
@Override
public void onJobStart(SparkListenerJobStart jobStart) {
- if (sc.getConf().getBoolean("spark.ui.enabled", true)) {
+
+ if (sc.getConf().getBoolean("spark.ui.enabled", true) &&
+ !Boolean.parseBoolean(properties.getProperty("zeppelin.spark.ui.hidden", "false"))) {
buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), context);
}
}