You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by zj...@apache.org on 2020/03/11 08:24:59 UTC

[zeppelin] branch master updated: [minor] Description update of interpreter-setting.json

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 78eeb56  [minor] Description update of interpreter-setting.json
78eeb56 is described below

commit 78eeb56a997a3d18f05f263cd1b7c1497f32eabe
Author: Jeff Zhang <zj...@apache.org>
AuthorDate: Wed Mar 11 16:24:50 2020 +0800

    [minor] Description update of interpreter-setting.json
---
 flink/src/main/resources/interpreter-setting.json  |  32 ++++++++++-----------
 python/src/main/resources/interpreter-setting.json |   6 ++--
 rlang/src/main/resources/interpreter-setting.json  |  10 +++----
 spark/interpreter/figure/unnamed-chunk-1-1.png     | Bin 403630 -> 0 bytes
 .../src/main/resources/interpreter-setting.json    |  30 ++++++-------------
 5 files changed, 32 insertions(+), 46 deletions(-)

diff --git a/flink/src/main/resources/interpreter-setting.json b/flink/src/main/resources/interpreter-setting.json
index ea92e45..3ca9736 100644
--- a/flink/src/main/resources/interpreter-setting.json
+++ b/flink/src/main/resources/interpreter-setting.json
@@ -9,21 +9,21 @@
         "envName": null,
         "propertyName": null,
         "defaultValue": "local",
-        "description": "execution mode, it could be local/remote/yarn/k8s",
+        "description": "Execution mode, it could be local|remote|yarn",
         "type": "string"
       },
       "flink.execution.remote.host": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "",
-        "description": "host name of running JobManager. Only used for remote mode",
+        "description": "Host name of running JobManager. Only used for remote mode",
         "type": "string"
       },
       "flink.execution.remote.port": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "",
-        "description": "port of running JobManager. Only used for remote mode",
+        "description": "Port of running JobManager. Only used for remote mode",
         "type": "number"
       },
       "flink.jm.memory": {
@@ -65,56 +65,56 @@
         "envName": null,
         "propertyName": null,
         "defaultValue": "default",
-        "description": "yarn queue name",
+        "description": "Yarn queue name",
         "type": "string"
       },
       "flink.webui.yarn.useProxy": {
         "envName": null,
         "propertyName": null,
         "defaultValue": false,
-        "description": "whether use yarn proxy url as flink weburl, e.g. http://localhost:8088/proxy/application_1583396598068_0004",
+        "description": "Whether to use the yarn proxy url as the flink web url, e.g. http://localhost:8088/proxy/application_1583396598068_0004",
         "type": "checkbox"
       },
       "flink.udf.jars": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "",
-        "description": "flink udf jars (comma separated), zeppelin will register udf for user automatically",
+        "description": "Flink udf jars (comma separated), Zeppelin will register the udfs in these jars for the user automatically",
         "type": "string"
       },
       "flink.execution.jars": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "",
-        "description": "additional user jars (comma separated)",
+        "description": "Additional user jars (comma separated)",
         "type": "string"
       },
       "flink.execution.packages": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "",
-        "description": "additional user packages (comma separated), e.g. flink connector packages",
+        "description": "Additional user packages (comma separated), e.g. flink connector packages",
         "type": "string"
       },
       "zeppelin.flink.scala.color": {
         "envName": null,
         "propertyName": null,
-        "defaultValue": "true",
-        "description": "whether display scala shell output in colorful format",
+        "defaultValue": true,
+        "description": "Whether to display scala shell output in colorful format",
         "type": "checkbox"
       },
       "zeppelin.flink.enableHive": {
         "envName": null,
         "propertyName": null,
-        "defaultValue": "false",
-        "description": "whether enable hive",
+        "defaultValue": false,
+        "description": "Whether to enable hive",
         "type": "checkbox"
       },
       "zeppelin.flink.hive.version": {
         "envName": null,
         "propertyName": null,
         "defaultValue": "2.3.4",
-        "description": "hive version that you would like to connect",
+        "description": "Hive version that you would like to connect to",
         "type": "string"
       },
       "zeppelin.flink.printREPLOutput": {
@@ -128,21 +128,21 @@
         "envName": "zeppelin.flink.maxResult",
         "propertyName": "zeppelin.flink.maxResult",
         "defaultValue": "1000",
-        "description": "max number of row returned by sql interpreter.",
+        "description": "Max number of rows returned by sql interpreter.",
         "type": "number"
       },
       "zeppelin.pyflink.python": {
         "envName": "zeppelin.pyflink.python",
         "propertyName": "zeppelin.pyflink.python",
         "defaultValue": "python",
-        "description": "python executable for pyflink",
+        "description": "Python executable for pyflink",
         "type": "string"
       },
       "flink.interpreter.close.shutdown_cluster": {
         "envName": "flink.interpreter.close.shutdown_cluster",
         "propertyName": "flink.interpreter.close.shutdown_cluster",
         "defaultValue": true,
-        "description": "Whether shutdown application when close interpreter",
+        "description": "Whether to shut down the flink cluster when closing the interpreter",
         "type": "checkbox"
       },
       "zeppelin.interpreter.close.cancel_job": {
diff --git a/python/src/main/resources/interpreter-setting.json b/python/src/main/resources/interpreter-setting.json
index 9fc3c7d..9897f02 100644
--- a/python/src/main/resources/interpreter-setting.json
+++ b/python/src/main/resources/interpreter-setting.json
@@ -9,7 +9,7 @@
         "envName": null,
         "propertyName": "zeppelin.python",
         "defaultValue": "python",
-        "description": "Python directory. It is set to python by default.(assume python is in your $PATH)",
+        "description": "Python binary executable path. It is set to python by default (assuming python is in your $PATH)",
         "type": "string"
       },
       "zeppelin.python.maxResult": {
@@ -22,7 +22,7 @@
       "zeppelin.python.useIPython": {
         "propertyName": "zeppelin.python.useIPython",
         "defaultValue": true,
-        "description": "whether use IPython when it is available",
+        "description": "Whether to use IPython when it is available in `%python`",
         "type": "checkbox"
       }
     },
@@ -40,7 +40,7 @@
       "zeppelin.ipython.launch.timeout": {
         "propertyName": "zeppelin.ipython.launch.timeout",
         "defaultValue": "30000",
-        "description": "time out for ipython launch",
+        "description": "Timeout for ipython launch",
         "type": "number"
       },
       "zeppelin.ipython.grpc.message_size": {
diff --git a/rlang/src/main/resources/interpreter-setting.json b/rlang/src/main/resources/interpreter-setting.json
index 1026067..24d1a48 100644
--- a/rlang/src/main/resources/interpreter-setting.json
+++ b/rlang/src/main/resources/interpreter-setting.json
@@ -8,21 +8,21 @@
         "envName": "ZEPPELIN_R_KNITR",
         "propertyName": "zeppelin.R.knitr",
         "defaultValue": true,
-        "description": "whether use knitr or not",
+        "description": "Whether to use knitr or not",
         "type": "checkbox"
       },
       "zeppelin.R.cmd": {
         "envName": "ZEPPELIN_R_CMD",
         "propertyName": "zeppelin.R.cmd",
         "defaultValue": "R",
-        "description": "R repl path",
+        "description": "R binary executable path",
         "type": "string"
       },
       "zeppelin.R.image.width": {
         "envName": "ZEPPELIN_R_IMAGE_WIDTH",
         "propertyName": "zeppelin.R.image.width",
         "defaultValue": "100%",
-        "description": "",
+        "description": "Image width of R plotting",
         "type": "number"
       },
       "zeppelin.R.render.options": {
@@ -60,14 +60,14 @@
         "envName": "",
         "propertyName": "zeppelin.R.shiny.iframe_width",
         "defaultValue": "100%",
-        "description": "",
+        "description": "Width of iframe of R shiny app",
         "type": "text"
       },
       "zeppelin.R.shiny.iframe_height": {
         "envName": "",
         "propertyName": "zeppelin.R.shiny.iframe_height",
         "defaultValue": "500px",
-        "description": "",
+        "description": "Height of iframe of R shiny app",
         "type": "text"
       }
     },
diff --git a/spark/interpreter/figure/unnamed-chunk-1-1.png b/spark/interpreter/figure/unnamed-chunk-1-1.png
deleted file mode 100644
index e3ca4d1..0000000
Binary files a/spark/interpreter/figure/unnamed-chunk-1-1.png and /dev/null differ
diff --git a/spark/interpreter/src/main/resources/interpreter-setting.json b/spark/interpreter/src/main/resources/interpreter-setting.json
index 29a3644..13a5de6 100644
--- a/spark/interpreter/src/main/resources/interpreter-setting.json
+++ b/spark/interpreter/src/main/resources/interpreter-setting.json
@@ -16,7 +16,7 @@
         "envName": "MASTER",
         "propertyName": "spark.master",
         "defaultValue": "local[*]",
-        "description": "Spark master uri. ex) spark://master_host:7077",
+        "description": "Spark master uri. local | yarn-client | yarn-cluster | spark master address of standalone mode, ex) spark://master_host:7077",
         "type": "string"
       },
       "spark.app.name": {
@@ -50,7 +50,7 @@
       "spark.executor.memory": {
         "envName": null,
         "propertyName": "spark.executor.memory",
-        "defaultValue": "",
+        "defaultValue": "1g",
         "description": "Executor memory per worker instance. ex) 512m, 32g",
         "type": "string"
       },
@@ -102,7 +102,7 @@
         "envName": null,
         "propertyName": "zeppelin.spark.enableSupportedVersionCheck",
         "defaultValue": true,
-        "description": "Do not change - developer only setting, not for production use",
+        "description": "Whether to check the supported spark version. Developer only setting, not for production use",
         "type": "checkbox"
       },
       "zeppelin.spark.uiWebUrl": {
@@ -116,7 +116,7 @@
         "envName": null,
         "propertyName": "zeppelin.spark.ui.hidden",
         "defaultValue": false,
-        "description": "Whether to hide spark ui in zeppelin ui",
+        "description": "Whether to hide spark ui in zeppelin ui",
         "type": "checkbox"
       },
       "spark.webui.yarn.useProxy": {
@@ -130,14 +130,14 @@
         "envName": null,
         "propertyName": "zeppelin.spark.scala.color",
         "defaultValue": true,
-        "description": "Whether to enable color output of spark scala interpreter",
+        "description": "Whether to enable color output of spark scala interpreter",
         "type": "checkbox"
       },
       "zeppelin.spark.deprecatedMsg.show": {
         "envName": null,
         "propertyName": "zeppelin.spark.deprecatedMsg.show",
         "defaultValue": true,
-        "description": "Whether show the spark deprecated message",
+        "description": "Whether to show the spark deprecated message, spark 2.2 and before are deprecated. Zeppelin will display a warning message by default",
         "type": "checkbox"
       }
     },
@@ -180,20 +180,6 @@
         "defaultValue": false,
         "description": "Enable ZeppelinContext variable interpolation into spark sql",
         "type": "checkbox"
-      },
-      "zeppelin.spark.maxResult": {
-        "envName": null,
-        "propertyName": "zeppelin.spark.maxResult",
-        "defaultValue": "1000",
-        "description": "Max number of Spark SQL result to display.",
-        "type": "number"
-      },
-      "zeppelin.spark.importImplicit": {
-        "envName": null,
-        "propertyName": "zeppelin.spark.importImplicit",
-        "defaultValue": true,
-        "description": "Import implicits, UDF collection, and sql if set true. true by default.",
-        "type": "checkbox"
       }
     },
     "editor": {
@@ -212,7 +198,7 @@
         "envName": "PYSPARK_PYTHON",
         "propertyName": "PYSPARK_PYTHON",
         "defaultValue": "python",
-        "description": "Python binary executable to use for PySpark in driver only (default is `PYSPARK_PYTHON`). Property <code>spark.pyspark.driver.python</code> take precedence if it is set",
+        "description": "Python binary executable to use for PySpark in both driver and workers (default is python2.7 if available, otherwise python). Property <code>spark.pyspark.python</code> takes precedence if it is set",
         "type": "string"
       },
       "PYSPARK_DRIVER_PYTHON": {
@@ -272,7 +258,7 @@
         "envName": null,
         "propertyName": "zeppelin.R.image.width",
         "defaultValue": "100%",
-        "description": "",
+        "description": "Image width of R plotting",
         "type": "number"
       },
       "zeppelin.R.render.options": {