Posted to commits@zeppelin.apache.org by zj...@apache.org on 2020/03/23 07:14:35 UTC

[zeppelin] 03/03: [minor] update flink tutorial note

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/zeppelin.git

commit fdd08f0d85d37a8374c42dba43b9df8e6da97829
Author: Jeff Zhang <zj...@apache.org>
AuthorDate: Mon Mar 23 15:06:29 2020 +0800

    [minor] update flink tutorial note
---
 notebook/Flink Tutorial/Batch ETL_2EW19CSPA.zpln   |  43 +-------
 .../Exploratory Data Analytics_2EZ9G3JJU.zpln      | 112 +--------------------
 .../Flink Tutorial/Streaming ETL_2EYD56B9B.zpln    |  13 +--
 3 files changed, 5 insertions(+), 163 deletions(-)
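For context, the hunks below drop the `%flink.conf` paragraph, the per-paragraph `results` caches, the note-level `permissions` block, and the `path` field from the three Flink tutorial notes. As a hedged illustration only (not part of this commit), here is a minimal Python sketch of how the same cleanup could be done programmatically; it assumes the .zpln files are plain JSON with the paragraph list under a "paragraphs" key, as the diff suggests:

    import json
    import sys

    def clean_note(note_path):
        """Hypothetical helper: strip cached output and redundant metadata
        from a Zeppelin note (.zpln), mirroring the kinds of fields this
        change removes by hand."""
        with open(note_path, encoding="utf-8") as f:
            note = json.load(f)

        # Note-level fields removed in this commit.
        note.pop("permissions", None)
        note.pop("path", None)

        # Cached execution results removed from paragraphs.
        for paragraph in note.get("paragraphs", []):
            paragraph.pop("results", None)

        with open(note_path, "w", encoding="utf-8") as f:
            json.dump(note, f, indent=2)

    if __name__ == "__main__":
        for p in sys.argv[1:]:
            clean_note(p)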

diff --git a/notebook/Flink Tutorial/Batch ETL_2EW19CSPA.zpln b/notebook/Flink Tutorial/Batch ETL_2EW19CSPA.zpln
index e924796..27b4f3a 100644
--- a/notebook/Flink Tutorial/Batch ETL_2EW19CSPA.zpln	
+++ b/notebook/Flink Tutorial/Batch ETL_2EW19CSPA.zpln	
@@ -45,40 +45,6 @@
       "status": "FINISHED"
     },
     {
-      "text": "%flink.conf\n\nflink.interpreter.close.shutdown_cluster false",
-      "user": "anonymous",
-      "dateUpdated": "2020-02-25 09:33:04.023",
-      "config": {
-        "colWidth": 12.0,
-        "fontSize": 9.0,
-        "enabled": true,
-        "results": {},
-        "editorSetting": {
-          "language": "text",
-          "editOnDblClick": false,
-          "completionKey": "TAB",
-          "completionSupport": true
-        },
-        "editorMode": "ace/mode/text"
-      },
-      "settings": {
-        "params": {},
-        "forms": {}
-      },
-      "results": {
-        "code": "SUCCESS",
-        "msg": []
-      },
-      "apps": [],
-      "progressUpdateIntervalMs": 500,
-      "jobName": "paragraph_1582594367896_465966821",
-      "id": "paragraph_1582594367896_465966821",
-      "dateCreated": "2020-02-25 09:32:47.896",
-      "dateStarted": "2020-02-25 09:33:04.028",
-      "dateFinished": "2020-02-25 09:33:04.032",
-      "status": "FINISHED"
-    },
-    {
       "title": "Download bank data",
       "text": "%sh\n\ncd /tmp\nwget https://archive.ics.uci.edu/ml/machine-learning-databases/00222/bank.zip\ntar -xvf bank.zip\n# upload data to hdfs if you want to run it in yarn mode\n# hadoop fs -put /tmp/bank.csv /tmp/bank.csv\n",
       "user": "anonymous",
@@ -499,18 +465,11 @@
   "id": "2EW19CSPA",
   "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
-  "permissions": {
-    "owners": [],
-    "runners": [],
-    "readers": [],
-    "writers": []
-  },
   "noteParams": {},
   "noteForms": {},
   "angularObjects": {},
   "config": {
     "isZeppelinNotebookCronEnable": false
   },
-  "info": {},
-  "path": "/Flink Tutorial/Batch ETL"
+  "info": {}
 }
\ No newline at end of file
diff --git a/notebook/Flink Tutorial/Exploratory Data Analytics_2EZ9G3JJU.zpln b/notebook/Flink Tutorial/Exploratory Data Analytics_2EZ9G3JJU.zpln
index cd13d1c..8c6a041 100644
--- a/notebook/Flink Tutorial/Exploratory Data Analytics_2EZ9G3JJU.zpln	
+++ b/notebook/Flink Tutorial/Exploratory Data Analytics_2EZ9G3JJU.zpln	
@@ -26,15 +26,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "HTML",
-            "data": "\u003cdiv class\u003d\"markdown-body\"\u003e\n\u003cp\u003eThis tutorial demonstrates how to use Flink do data exploration analytics via its.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003ebatch sql\u003c/li\u003e\n\u003cli\u003eudf (scala, python \u0026amp; hive)\u003c/li\u003e\n\u003cli\u003eZeppelin\u0026rsquo;s dynamic forms and builtin visualization\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eWe the bank data registered in another tutorial note. You can also use [...]
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1579053946947_-1754951794",
@@ -120,15 +111,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "age\taval\n19\t4\n20\t3\n21\t7\n22\t9\n23\t20\n24\t24\n25\t44\n26\t77\n27\t94\n28\t103\n29\t97\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578043926895_1558885985",
@@ -223,15 +205,6 @@
           }
         }
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "age\tval\n19\t4\n20\t3\n21\t7\n22\t9\n23\t20\n24\t24\n25\t44\n26\t77\n27\t94\n28\t103\n29\t97\n30\t150\n31\t199\n32\t224\n33\t186\n34\t231\n35\t180\n36\t188\n37\t161\n38\t159\n39\t130\n40\t142\n41\t135\n42\t141\n43\t115\n44\t105\n45\t112\n46\t119\n47\t108\n48\t114\n49\t112\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578904010994_-1300955750",
@@ -337,15 +310,6 @@
           }
         }
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "age\tval\n23\t3\n24\t11\n25\t11\n26\t18\n27\t26\n28\t23\n29\t37\n30\t56\n31\t104\n32\t105\n33\t103\n34\t142\n35\t109\n36\t117\n37\t100\n38\t99\n39\t88\n40\t105\n41\t97\n42\t91\n43\t79\n44\t68\n45\t76\n46\t82\n47\t78\n48\t91\n49\t87\n50\t74\n51\t63\n52\t66\n53\t75\n54\t56\n55\t68\n56\t50\n57\t78\n58\t67\n59\t56\n60\t36\n61\t15\n62\t5\n63\t7\n64\t6\n65\t4\n66\t7\n67\t5\n68\t1\n69\t5\n70\t5\n71\t5\n72\t4\n73\t6\n74\t2\n75\t3\n76\t1\n77\t5\n78\t2\n79\t3\n80\t6\n81\t1\n83 [...]
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578904047823_-1212655985",
@@ -380,15 +344,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TEXT",
-            "data": "defined class ScalaUpper\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905208609_-1175158458",
@@ -423,10 +378,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": []
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905223714_1228156151",
@@ -467,15 +418,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TEXT",
-            "data": "The following commands are available:\n\nCREATE TABLE\t\tCreate table under current catalog and database.\nDROP TABLE\t\tDrop table with optional catalog and database. Syntax: \u0027DROP TABLE [IF EXISTS] \u003cname\u003e;\u0027\nCREATE VIEW\t\tCreates a virtual table from a SQL query. Syntax: \u0027CREATE VIEW \u003cname\u003e AS \u003cquery\u003e;\u0027\nDESCRIBE\t\tDescribes the schema of a table with the given name.\nDROP VIEW\t\tDeletes a previously created virt [...]
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580872625746_1221957461",
@@ -561,15 +503,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "EXPR$0\tEXPR$1\nPRIMARY\t678\nSECONDARY\t2306\nTERTIARY\t1350\nUNKNOWN\t187\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905241045_-678553675",
@@ -668,15 +601,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "edu\tc\nPRIMARY\t678\nSECONDARY\t2306\nTERTIARY\t1350\nUNKNOWN\t187\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578905276271_782994324",
@@ -765,15 +689,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "tab_name\nbank\nbank_raw\ndepartments\ndest_csv\ndest_orc\nemployee\nitems\nsink_kafka\nsource_csv\nsource_kafka\nweb_log_small\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580889919226_-1710921935",
@@ -834,15 +749,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "table\nUnnamedTable$0\nbank\nbank_raw\ndepartments\ndest_csv\ndest_orc\nemployee\nitems\nsink_kafka\nsource_csv\nsource_kafka\nweb_log_small\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1580890881367_1309558694",
@@ -906,15 +812,6 @@
         "params": {},
         "forms": {}
       },
-      "results": {
-        "code": "SUCCESS",
-        "msg": [
-          {
-            "type": "TABLE",
-            "data": "eid\tname\tsalary\tdestination\n1201\tGopal\t45000\tTechnical manager\n1202\tManisha\t45000\tProof reader\n1203\tMasthanvali\t40000\tTechnical writer\n1204\tKiran\t40000\tHr Admin\n1205\tKranthi\t30000\tOp Admin\n"
-          }
-        ]
-      },
       "apps": [],
       "progressUpdateIntervalMs": 500,
       "jobName": "paragraph_1578043607185_590649655",
@@ -945,18 +842,11 @@
   "id": "2EZ9G3JJU",
   "defaultInterpreterGroup": "flink",
   "version": "0.9.0-SNAPSHOT",
-  "permissions": {
-    "owners": [],
-    "runners": [],
-    "readers": [],
-    "writers": []
-  },
   "noteParams": {},
   "noteForms": {},
   "angularObjects": {},
   "config": {
     "isZeppelinNotebookCronEnable": false
   },
-  "info": {},
-  "path": "/Flink Tutorial/Exploratory Data Analytics"
+  "info": {}
 }
\ No newline at end of file
diff --git a/notebook/Flink Tutorial/Streaming ETL_2EYD56B9B.zpln b/notebook/Flink Tutorial/Streaming ETL_2EYD56B9B.zpln
index 06d5be2..d7d58f7 100644
--- a/notebook/Flink Tutorial/Streaming ETL_2EYD56B9B.zpln	
+++ b/notebook/Flink Tutorial/Streaming ETL_2EYD56B9B.zpln	
@@ -4,10 +4,10 @@
       "title": "Overview",
       "text": "%md\n\nThis tutorial demonstrate how to use Flink do streaming processing via its streaming sql + udf. In this tutorial, we read data from kafka queue and do some simple processing (just filtering here) and then write it back to another kafka queue. We use this [docker](https://kafka-connect-datagen.readthedocs.io/en/latest/) to create kafka cluster and source data \n\n",
       "user": "anonymous",
-      "dateUpdated": "2020-01-19 15:36:08.418",
+      "dateUpdated": "2020-03-23 15:03:54.596",
       "config": {
         "runOnSelectionChange": true,
-        "title": false,
+        "title": true,
         "checkEmpty": true,
         "colWidth": 12.0,
         "fontSize": 9.0,
@@ -324,18 +324,11 @@
   "id": "2EYD56B9B",
   "defaultInterpreterGroup": "spark",
   "version": "0.9.0-SNAPSHOT",
-  "permissions": {
-    "owners": [],
-    "runners": [],
-    "readers": [],
-    "writers": []
-  },
   "noteParams": {},
   "noteForms": {},
   "angularObjects": {},
   "config": {
     "isZeppelinNotebookCronEnable": false
   },
-  "info": {},
-  "path": "/Flink Tutorial/Streaming ETL"
+  "info": {}
 }
\ No newline at end of file