You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by sj...@apache.org on 2020/07/28 13:32:56 UTC

[flink-web] 06/06: rebuild website

This is an automated email from the ASF dual-hosted git repository.

sjwiesman pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git

commit a125d4c0daf2856d5baa7477cb87d95fcd281369
Author: Seth Wiesman <sj...@gmail.com>
AuthorDate: Tue Jul 28 08:32:31 2020 -0500

    rebuild website
---
 ...ql-demo-building-e2e-streaming-application.html | 536 +++++++++++++++++++++
 content/blog/page10/index.html                     |  42 +-
 content/blog/page11/index.html                     |  40 +-
 content/blog/page12/index.html                     |  45 +-
 content/blog/{page6 => page13}/index.html          | 164 +------
 content/blog/page2/index.html                      |  38 +-
 content/blog/page3/index.html                      |  40 +-
 content/blog/page4/index.html                      |  40 +-
 content/blog/page5/index.html                      |  40 +-
 content/blog/page6/index.html                      |  42 +-
 content/blog/page7/index.html                      |  45 +-
 content/blog/page8/index.html                      |  43 +-
 content/blog/page9/index.html                      |  40 +-
 13 files changed, 836 insertions(+), 319 deletions(-)

diff --git a/content/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html b/content/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html
new file mode 100644
index 0000000..9b4f9e1
--- /dev/null
+++ b/content/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html
@@ -0,0 +1,536 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
+    <title>Apache Flink: Flink SQL Demo: Building an End-to-End Streaming Application</title>
+    <link rel="shortcut icon" href="/favicon.ico" type="image/x-icon">
+    <link rel="icon" href="/favicon.ico" type="image/x-icon">
+
+    <!-- Bootstrap -->
+    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.4.1/css/bootstrap.min.css">
+    <link rel="stylesheet" href="/css/flink.css">
+    <link rel="stylesheet" href="/css/syntax.css">
+
+    <!-- Blog RSS feed -->
+    <link href="/blog/feed.xml" rel="alternate" type="application/rss+xml" title="Apache Flink Blog: RSS feed" />
+
+    <!-- jQuery (necessary for Bootstrap's JavaScript plugins) -->
+    <!-- We need to load Jquery in the header for custom google analytics event tracking-->
+    <script src="/js/jquery.min.js"></script>
+
+    <!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
+    <!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
+    <!--[if lt IE 9]>
+      <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
+      <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
+    <![endif]-->
+  </head>
+  <body>  
+    
+
+    <!-- Main content. -->
+    <div class="container">
+    <div class="row">
+
+      
+     <div id="sidebar" class="col-sm-3">
+        
+
+<!-- Top navbar. -->
+    <nav class="navbar navbar-default">
+        <!-- The logo. -->
+        <div class="navbar-header">
+          <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">
+            <span class="icon-bar"></span>
+            <span class="icon-bar"></span>
+            <span class="icon-bar"></span>
+          </button>
+          <div class="navbar-logo">
+            <a href="/">
+              <img alt="Apache Flink" src="/img/flink-header-logo.svg" width="147px" height="73px">
+            </a>
+          </div>
+        </div><!-- /.navbar-header -->
+
+        <!-- The navigation links. -->
+        <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
+          <ul class="nav navbar-nav navbar-main">
+
+            <!-- First menu section explains visitors what Flink is -->
+
+            <!-- What is Stream Processing? -->
+            <!--
+            <li><a href="/streamprocessing1.html">What is Stream Processing?</a></li>
+            -->
+
+            <!-- What is Flink? -->
+            <li><a href="/flink-architecture.html">What is Apache Flink?</a></li>
+
+            
+            <ul class="nav navbar-nav navbar-subnav">
+              <li >
+                  <a href="/flink-architecture.html">Architecture</a>
+              </li>
+              <li >
+                  <a href="/flink-applications.html">Applications</a>
+              </li>
+              <li >
+                  <a href="/flink-operations.html">Operations</a>
+              </li>
+            </ul>
+            
+
+            <!-- What is Stateful Functions? -->
+
+            <li><a href="/stateful-functions.html">What is Stateful Functions?</a></li>
+
+            <!-- Use cases -->
+            <li><a href="/usecases.html">Use Cases</a></li>
+
+            <!-- Powered by -->
+            <li><a href="/poweredby.html">Powered By</a></li>
+
+
+            &nbsp;
+            <!-- Second menu section aims to support Flink users -->
+
+            <!-- Downloads -->
+            <li><a href="/downloads.html">Downloads</a></li>
+
+            <!-- Getting Started -->
+            <li class="dropdown">
+              <a class="dropdown-toggle" data-toggle="dropdown" href="#">Getting Started<span class="caret"></span></a>
+              <ul class="dropdown-menu">
+                <li><a href="https://ci.apache.org/projects/flink/flink-docs-release-1.11/getting-started/index.html" target="_blank">With Flink <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+                <li><a href="https://ci.apache.org/projects/flink/flink-statefun-docs-release-2.1/getting-started/project-setup.html" target="_blank">With Flink Stateful Functions <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+                <li><a href="/training.html">Training Course</a></li>
+              </ul>
+            </li>
+
+            <!-- Documentation -->
+            <li class="dropdown">
+              <a class="dropdown-toggle" data-toggle="dropdown" href="#">Documentation<span class="caret"></span></a>
+              <ul class="dropdown-menu">
+                <li><a href="https://ci.apache.org/projects/flink/flink-docs-release-1.11" target="_blank">Flink 1.11 (Latest stable release) <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+                <li><a href="https://ci.apache.org/projects/flink/flink-docs-master" target="_blank">Flink Master (Latest Snapshot) <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+                <li><a href="https://ci.apache.org/projects/flink/flink-statefun-docs-release-2.1" target="_blank">Flink Stateful Functions 2.1 (Latest stable release) <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+                <li><a href="https://ci.apache.org/projects/flink/flink-statefun-docs-master" target="_blank">Flink Stateful Functions Master (Latest Snapshot) <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+              </ul>
+            </li>
+
+            <!-- getting help -->
+            <li><a href="/gettinghelp.html">Getting Help</a></li>
+
+            <!-- Blog -->
+            <li><a href="/blog/"><b>Flink Blog</b></a></li>
+
+
+            <!-- Flink-packages -->
+            <li>
+              <a href="https://flink-packages.org" target="_blank">flink-packages.org <small><span class="glyphicon glyphicon-new-window"></span></small></a>
+            </li>
+            &nbsp;
+
+            <!-- Third menu section aim to support community and contributors -->
+
+            <!-- Community -->
+            <li><a href="/community.html">Community &amp; Project Info</a></li>
+
+            <!-- Roadmap -->
+            <li><a href="/roadmap.html">Roadmap</a></li>
+
+            <!-- Contribute -->
+            <li><a href="/contributing/how-to-contribute.html">How to Contribute</a></li>
+            
+
+            <!-- GitHub -->
+            <li>
+              <a href="https://github.com/apache/flink" target="_blank">Flink on GitHub <small><span class="glyphicon glyphicon-new-window"></span></small></a>
+            </li>
+
+            &nbsp;
+
+            <!-- Language Switcher -->
+            <li>
+              
+                
+                  <a href="/zh/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">中文版</a>
+                
+              
+            </li>
+
+          </ul>
+
+          <ul class="nav navbar-nav navbar-bottom">
+          <hr />
+
+            <!-- Twitter -->
+            <li><a href="https://twitter.com/apacheflink" target="_blank">@ApacheFlink <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+
+            <!-- Visualizer -->
+            <li class=" hidden-md hidden-sm"><a href="/visualizer/" target="_blank">Plan Visualizer <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+
+          <hr />
+
+            <li><a href="https://apache.org" target="_blank">Apache Software Foundation <small><span class="glyphicon glyphicon-new-window"></span></small></a></li>
+
+            <li>
+              <style>
+                .smalllinks:link {
+                  display: inline-block !important; background: none; padding-top: 0px; padding-bottom: 0px; padding-right: 0px; min-width: 75px;
+                }
+              </style>
+
+              <a class="smalllinks" href="https://www.apache.org/licenses/" target="_blank">License</a> <small><span class="glyphicon glyphicon-new-window"></span></small>
+
+              <a class="smalllinks" href="https://www.apache.org/security/" target="_blank">Security</a> <small><span class="glyphicon glyphicon-new-window"></span></small>
+
+              <a class="smalllinks" href="https://www.apache.org/foundation/sponsorship.html" target="_blank">Donate</a> <small><span class="glyphicon glyphicon-new-window"></span></small>
+
+              <a class="smalllinks" href="https://www.apache.org/foundation/thanks.html" target="_blank">Thanks</a> <small><span class="glyphicon glyphicon-new-window"></span></small>
+            </li>
+
+          </ul>
+        </div><!-- /.navbar-collapse -->
+    </nav>
+
+      </div>
+      <div class="col-sm-9">
+      <div class="row-fluid">
+  <div class="col-sm-12">
+    <div class="row">
+      <h1>Flink SQL Demo: Building an End-to-End Streaming Application</h1>
+      <p><i></i></p>
+
+      <article>
+        <p>28 Jul 2020 Jark Wu (<a href="https://twitter.com/JarkWu">@JarkWu</a>)</p>
+
+<p>Apache Flink 1.11 has released many exciting new features, including many developments in Flink SQL which is evolving at a fast pace. This article takes a closer look at how to quickly build streaming applications with Flink SQL from a practical point of view.</p>
+
+<p>In the following sections, we describe how to integrate Kafka, MySQL, Elasticsearch, and Kibana with Flink SQL to analyze e-commerce user behavior in real-time. All exercises in this blogpost are performed in the Flink SQL CLI, and the entire process uses standard SQL syntax, without a single line of Java/Scala code or IDE installation. The final result of this demo is shown in the following figure:</p>
+
+<center>
+<img src="/img/blog/2020-05-03-flink-sql-demo/image1.gif" width="650px" alt="Demo Overview" />
+</center>
+<p><br /></p>
+
+<h1 id="preparation">Preparation</h1>
+
+<p>Prepare a Linux or MacOS computer with Docker installed.</p>
+
+<h2 id="starting-the-demo-environment">Starting the Demo Environment</h2>
+
+<p>The components required in this demo are all managed in containers, so we will use <code>docker-compose</code> to start them. First, download the <code>docker-compose.yml</code> file that defines the demo environment, for example by running the following commands:</p>
+
+<div class="highlight"><pre><code>mkdir flink-sql-demo; cd flink-sql-demo;
+wget https://raw.githubusercontent.com/wuchong/flink-sql-demo/v1.11-EN/docker-compose.yml
+</code></pre></div>
+
+<p>The Docker Compose environment consists of the following containers:</p>
+
+<ul>
+  <li><strong>Flink SQL CLI:</strong> used to submit queries and visualize their results.</li>
+  <li><strong>Flink Cluster:</strong> a Flink JobManager and a Flink TaskManager container to execute queries.</li>
+  <li><strong>MySQL:</strong> MySQL 5.7 and a pre-populated <code>category</code> table in the database. The <code>category</code> table will be joined with data in Kafka to enrich the real-time data.</li>
+  <li><strong>Kafka:</strong> mainly used as a data source. The DataGen component automatically writes data into a Kafka topic.</li>
+  <li><strong>Zookeeper:</strong> this component is required by Kafka.</li>
+  <li><strong>Elasticsearch:</strong> mainly used as a data sink.</li>
+  <li><strong>Kibana:</strong> used to visualize the data in Elasticsearch.</li>
+  <li><strong>DataGen:</strong> the data generator. After the container is started, user behavior data is automatically generated and sent to the Kafka topic. By default, 2000 data entries are generated each second for about 1.5 hours. You can modify DataGen’s <code>speedup</code> parameter in <code>docker-compose.yml</code> to adjust the generation rate (which takes effect after Docker Compose is restarted).</li>
+</ul>
+
+<div class="alert alert-danger">
+  <p><span class="label label-danger" style="display: inline-block"> Note </span>
+Before starting the containers, we recommend configuring Docker so that sufficient resources are available and the environment does not become unresponsive. We suggest running Docker at 3-4 GB memory and 3-4 CPU cores.</p>
+</div>
+
+<p>To start all containers, run the following command in the directory that contains the <code>docker-compose.yml</code> file.</p>
+
+<div class="highlight"><pre><code>docker-compose up -d
+</code></pre></div>
+
+<p>This command automatically starts all the containers defined in the Docker Compose configuration in a detached mode. Run <code>docker ps</code> to check whether the 9 containers are running properly. You can also visit <a href="http://localhost:5601/">http://localhost:5601/</a> to see if Kibana is running normally.</p>
+
+<p>Don’t forget to run the following command to stop all containers after you have finished the tutorial:</p>
+
+<div class="highlight"><pre><code>docker-compose down
+</code></pre></div>
+
+<h2 id="entering-the-flink-sql-cli-client">Entering the Flink SQL CLI client</h2>
+
+<p>To enter the SQL CLI client run:</p>
+
+<div class="highlight"><pre><code class="language-bash">docker-compose <span class="nb">exec </span>sql-client ./sql-client.sh</code></pre></div>
+
+<p>The command starts the SQL CLI client in the container.
+You should see the welcome screen of the CLI client.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image3.png" width="500px" alt="Flink SQL CLI welcome page" />
+</center>
+<p><br /></p>
+
+<h2 id="creating-a-kafka-table-using-ddl">Creating a Kafka table using DDL</h2>
+
+<p>The DataGen container continuously writes events into the Kafka <code>user_behavior</code> topic. This data contains the user behavior on the day of November 27, 2017 (behaviors include “click”, “like”, “purchase” and “add to shopping cart” events). Each row represents a user behavior event, with the user ID, product ID, product category ID, event type, and timestamp in JSON format. Note that the dataset is from the <a href="https://tianchi.aliyun.com/dataset/dataDetail?dataId=649">Al [...]
+
+<p>In the directory that contains <code>docker-compose.yml</code>, run the following command to view the first 10 data entries generated in the Kafka topic:</p>
+
+<div class="highlight"><pre><code>docker-compose exec kafka bash -c 'kafka-console-consumer.sh --topic user_behavior --bootstrap-server kafka:9094 --from-beginning --max-messages 10'
+
+{"user_id": "952483", "item_id":"310884", "category_id": "4580532", "behavior": "pv", "ts": "2017-11-27T00:00:00Z"}
+{"user_id": "794777", "item_id":"5119439", "category_id": "982926", "behavior": "pv", "ts": "2017-11-27T00:00:00Z"}
+...
+</code></pre></div>
+
+<p>In order to make the events in the Kafka topic accessible to Flink SQL, we run the following DDL statement in SQL CLI to create a table that connects to the topic in the Kafka cluster:</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">TABLE</span> <span class="n">user_behavior</span> <span class="p">(</span>
+    <span class="n">user_id</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="n">item_id</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="n">category_id</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="n">behavior</span> <span class="n">STRING</span><span class="p">,</span>
+    <span class="n">ts</span> <span class="k">TIMESTAMP</span><span class="p">(</span><span class="mi">3</span><span class="p">),</span>
+    <span class="n">proctime</span> <span class="k">AS</span> <span class="n">PROCTIME</span><span class="p">(),</span>   <span class="c1">-- generates processing-time attribute using computed column</span>
+    <span class="n">WATERMARK</span> <span class="k">FOR</span> <span class="n">ts</span> <span class="k">AS</span> <span class="n">ts</span> <span class="o">-</span> <span class="nb">INTERVAL</span> <span class="s1">&#39;5&#39;</span> <span class="k">SECOND</span>  <span class="c1">-- defines watermark on ts column, marks ts as event-time attribute</span>
+<span class="p">)</span> <span class="k">WITH</span> <span class="p">(</span>
+    <span class="s1">&#39;connector&#39;</span> <span class="o">=</span> <span class="s1">&#39;kafka&#39;</span><span class="p">,</span>  <span class="c1">-- using kafka connector</span>
+    <span class="s1">&#39;topic&#39;</span> <span class="o">=</span> <span class="s1">&#39;user_behavior&#39;</span><span class="p">,</span>  <span class="c1">-- kafka topic</span>
+    <span class="s1">&#39;scan.startup.mode&#39;</span> <span class="o">=</span> <span class="s1">&#39;earliest-offset&#39;</span><span class="p">,</span>  <span class="c1">-- reading from the beginning</span>
+    <span class="s1">&#39;properties.bootstrap.servers&#39;</span> <span class="o">=</span> <span class="s1">&#39;kafka:9094&#39;</span><span class="p">,</span>  <span class="c1">-- kafka broker address</span>
+    <span class="s1">&#39;format&#39;</span> <span class="o">=</span> <span class="s1">&#39;json&#39;</span>  <span class="c1">-- the data format is json</span>
+<span class="p">);</span></code></pre></div>
+
+<p>The above snippet declares five fields based on the data format. In addition, it uses the computed column syntax and built-in <code>PROCTIME()</code> function to declare a virtual column that generates the processing-time attribute. It also uses the <code>WATERMARK</code> syntax to declare the watermark strategy on the <code>ts</code> field (tolerate 5-seconds out-of-order). Therefore, the <code>ts</code> field becomes an event-time attribute. For more information about time attribute [...]
+
+<ul>
+  <li><a href="https://ci.apache.org/projects/flink/flink-docs-release-1.11/dev/table/streaming/time_attributes.html">Time attributes in Flink’s Table API &amp; SQL</a></li>
+  <li><a href="https://ci.apache.org/projects/flink/flink-docs-release-1.11/dev/table/sql/create.html#create-table">DDL Syntax in Flink SQL</a></li>
+</ul>
+
+<p>After creating the <code>user_behavior</code> table in the SQL CLI, run <code>SHOW TABLES;</code> and <code>DESCRIBE user_behavior;</code> to see registered tables and table details. Also, run the command <code>SELECT * FROM user_behavior;</code> directly in the SQL CLI to preview the data (press <code>q</code> to exit).</p>
+
+<p>Next, we discover more about Flink SQL through three real-world scenarios.</p>
+
+<h1 id="hourly-trading-volume">Hourly Trading Volume</h1>
+
+<h2 id="creating-an-elasticsearch-table-using-ddl">Creating an Elasticsearch table using DDL</h2>
+
+<p>Let’s create an Elasticsearch result table in the SQL CLI. We need two columns in this case: <code>hour_of_day</code> and <code>buy_cnt</code> (trading volume).</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">TABLE</span> <span class="n">buy_cnt_per_hour</span> <span class="p">(</span>
+    <span class="n">hour_of_day</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="n">buy_cnt</span> <span class="nb">BIGINT</span>
+<span class="p">)</span> <span class="k">WITH</span> <span class="p">(</span>
+    <span class="s1">&#39;connector&#39;</span> <span class="o">=</span> <span class="s1">&#39;elasticsearch-7&#39;</span><span class="p">,</span> <span class="c1">-- using elasticsearch connector</span>
+    <span class="s1">&#39;hosts&#39;</span> <span class="o">=</span> <span class="s1">&#39;http://elasticsearch:9200&#39;</span><span class="p">,</span>  <span class="c1">-- elasticsearch address</span>
+    <span class="s1">&#39;index&#39;</span> <span class="o">=</span> <span class="s1">&#39;buy_cnt_per_hour&#39;</span>  <span class="c1">-- elasticsearch index name, similar to database table name</span>
+<span class="p">);</span></code></pre></div>
+
+<p>There is no need to create the <code>buy_cnt_per_hour</code> index in Elasticsearch in advance since Elasticsearch will automatically create the index if it does not exist.</p>
+
+<h2 id="submitting-a-query">Submitting a Query</h2>
+
+<p>The hourly trading volume is the number of “buy” behaviors completed each hour. Therefore, we can use a <code>TUMBLE</code> window function to assign data into hourly windows. Then, we count the number of “buy” records in each window. To implement this, we can filter out the “buy” data first and then apply <code>COUNT(*)</code>.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">INSERT</span> <span class="k">INTO</span> <span class="n">buy_cnt_per_hour</span>
+<span class="k">SELECT</span> <span class="n">HOUR</span><span class="p">(</span><span class="n">TUMBLE_START</span><span class="p">(</span><span class="n">ts</span><span class="p">,</span> <span class="nb">INTERVAL</span> <span class="s1">&#39;1&#39;</span> <span class="n">HOUR</span><span class="p">)),</span> <span class="k">COUNT</span><span class="p">(</span><span class="o">*</span><span class="p">)</span>
+<span class="k">FROM</span> <span class="n">user_behavior</span>
+<span class="k">WHERE</span> <span class="n">behavior</span> <span class="o">=</span> <span class="s1">&#39;buy&#39;</span>
+<span class="k">GROUP</span> <span class="k">BY</span> <span class="n">TUMBLE</span><span class="p">(</span><span class="n">ts</span><span class="p">,</span> <span class="nb">INTERVAL</span> <span class="s1">&#39;1&#39;</span> <span class="n">HOUR</span><span class="p">);</span></code></pre></div>
+
+<p>Here, we use the built-in <code>HOUR</code> function to extract the value for each hour in the day from a <code>TIMESTAMP</code> column. Use <code>INSERT INTO</code> to start a Flink SQL job that continuously writes results into the Elasticsearch <code>buy_cnt_per_hour</code> index. The Elasticsearch result table can be seen as a materialized view of the query. You can find more information about Flink’s window aggregation in the <a href="https://ci.apache.org/projects/flink/flink-docs [...]
+
+<p>After running the previous query in the Flink SQL CLI, we can observe the submitted task on the <a href="http://localhost:8081">Flink Web UI</a>. This task is a streaming task and therefore runs continuously.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image4.jpg" width="800px" alt="Flink Dashboard" />
+</center>
+<p><br /></p>
+
+<h2 id="using-kibana-to-visualize-results">Using Kibana to Visualize Results</h2>
+
+<p>Access Kibana at <a href="http://localhost:5601">http://localhost:5601</a>. First, configure an index pattern by clicking “Management” in the left-side toolbar and find “Index Patterns”. Next, click “Create Index Pattern” and enter the full index name <code>buy_cnt_per_hour</code> to create the index pattern. After creating the index pattern, we can explore data in Kibana.</p>
+
+<div class="alert alert-info">
+  <p><span class="label label-info" style="display: inline-block"><span class="glyphicon glyphicon-info-sign" aria-hidden="true"></span> Note </span>
Since we are using the TUMBLE window of one hour here, it might take about four minutes between the time the containers are started and the time the first row is emitted. Until then the index does not exist and Kibana is unable to find the index.</p>
+</div>
+
+<p>Click “Discover” in the left-side toolbar. Kibana lists the content of the created index.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image5.jpg" width="800px" alt="Kibana Discover" />
+</center>
+<p><br /></p>
+
+<p>Next, create a dashboard to display various views. Click “Dashboard” on the left side of the page to create a dashboard named “User Behavior Analysis”. Then, click “Create New” to create a new view. Select “Area” (area graph), then select the <code>buy_cnt_per_hour</code> index, and draw the trading volume area chart as illustrated in the configuration on the left side of the following diagram. Apply the changes by clicking the “▶” play button. Then, save it as “Hourly Trading Volume”.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image6.jpg" width="800px" alt="Hourly Trading Volume" />
+</center>
+<p><br /></p>
+
+<p>You can see that during the early morning hours the number of transactions has its lowest value for the entire day.</p>
+
+<p>As real-time data is added into the indices, you can enable auto-refresh in Kibana to see real-time visualization changes and updates. You can do so by clicking the time picker and entering a refresh interval (e.g. 3 seconds) in the “Refresh every” field.</p>
+
+<h1 id="cumulative-number-of-unique-visitors-every-10-min">Cumulative number of Unique Visitors every 10-min</h1>
+
+<p>Another interesting visualization is the cumulative number of unique visitors (UV). For example, the number of UV at 10:00 represents the total number of UV from 00:00 to 10:00. Therefore, the curve is monotonically increasing.</p>
+
+<p>Let’s create another Elasticsearch table in the SQL CLI to store the UV results. This table contains 3 columns: date, time and cumulative UVs.
+The <code>date_str</code> and <code>time_str</code> columns are defined as the primary key; the Elasticsearch sink will use them to calculate the document ID and work in upsert mode to update the UV values under that document ID.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">TABLE</span> <span class="n">cumulative_uv</span> <span class="p">(</span>
+    <span class="n">date_str</span> <span class="n">STRING</span><span class="p">,</span>
+    <span class="n">time_str</span> <span class="n">STRING</span><span class="p">,</span>
+    <span class="n">uv</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="k">PRIMARY</span> <span class="k">KEY</span> <span class="p">(</span><span class="n">date_str</span><span class="p">,</span> <span class="n">time_str</span><span class="p">)</span> <span class="k">NOT</span> <span class="n">ENFORCED</span>
+<span class="p">)</span> <span class="k">WITH</span> <span class="p">(</span>
+    <span class="s1">&#39;connector&#39;</span> <span class="o">=</span> <span class="s1">&#39;elasticsearch-7&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;hosts&#39;</span> <span class="o">=</span> <span class="s1">&#39;http://elasticsearch:9200&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;index&#39;</span> <span class="o">=</span> <span class="s1">&#39;cumulative_uv&#39;</span>
+<span class="p">);</span></code></pre></div>
+
+<p>We can extract the date and time using <code>DATE_FORMAT</code> function based on the <code>ts</code> field. As the section title describes, we only need to report every 10 minutes. So, we can use <code>SUBSTR</code> and the string concat function <code>||</code> to convert the time value into a 10-minute interval time string, such as <code>12:00</code>, <code>12:10</code>.
+Next, we group data by <code>date_str</code> and perform a <code>COUNT DISTINCT</code> aggregation on <code>user_id</code> to get the current cumulative UV in this day. Additionally, we perform a <code>MAX</code> aggregation on <code>time_str</code> field to get the current stream time: the maximum event time observed so far.
+As the maximum time is also a part of the primary key of the sink, the final result is that a new point will be inserted into Elasticsearch every 10 minutes, and the latest point will be updated continuously until the next 10-minute point is generated.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">INSERT</span> <span class="k">INTO</span> <span class="n">cumulative_uv</span>
+<span class="k">SELECT</span> <span class="n">date_str</span><span class="p">,</span> <span class="k">MAX</span><span class="p">(</span><span class="n">time_str</span><span class="p">),</span> <span class="k">COUNT</span><span class="p">(</span><span class="k">DISTINCT</span> <span class="n">user_id</span><span class="p">)</span> <span class="k">as</span> <span class="n">uv</span>
+<span class="k">FROM</span> <span class="p">(</span>
+  <span class="k">SELECT</span>
+    <span class="n">DATE_FORMAT</span><span class="p">(</span><span class="n">ts</span><span class="p">,</span> <span class="s1">&#39;yyyy-MM-dd&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">date_str</span><span class="p">,</span>
+    <span class="n">SUBSTR</span><span class="p">(</span><span class="n">DATE_FORMAT</span><span class="p">(</span><span class="n">ts</span><span class="p">,</span> <span class="s1">&#39;HH:mm&#39;</span><span class="p">),</span><span class="mi">1</span><span class="p">,</span><span class="mi">4</span><span class="p">)</span> <span class="o">||</span> <span class="s1">&#39;0&#39;</span> <span class="k">as</span> <span class="n">time_str</span><span class="p">,</span>
+    <span class="n">user_id</span>
+  <span class="k">FROM</span> <span class="n">user_behavior</span><span class="p">)</span>
+<span class="k">GROUP</span> <span class="k">BY</span> <span class="n">date_str</span><span class="p">;</span></code></pre></div>
+
+<p>After submitting this query, we create a <code>cumulative_uv</code> index pattern in Kibana. We then create a “Line” (line graph) on the dashboard, by selecting the <code>cumulative_uv</code> index, and drawing the cumulative UV curve according to the configuration on the left side of the following figure before finally saving the curve.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image7.jpg" width="800px" alt="Cumulative Unique Visitors every 10-min" />
+</center>
+<p><br /></p>
+
+<h1 id="top-categories">Top Categories</h1>
+
+<p>The last visualization represents the category rankings to inform us on the most popular categories in our e-commerce site. Since our data source offers events for more than 5,000 categories without providing any additional significance to our analytics, we would like to reduce it so that it only includes the top-level categories. We will use the data in our MySQL database by joining it as a dimension table with our Kafka events to map sub-categories to top-level categories.</p>
+
+<p>Create a table in the SQL CLI to make the data in MySQL accessible to Flink SQL.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">TABLE</span> <span class="n">category_dim</span> <span class="p">(</span>
+    <span class="n">sub_category_id</span> <span class="nb">BIGINT</span><span class="p">,</span>
+    <span class="n">parent_category_name</span> <span class="n">STRING</span>
+<span class="p">)</span> <span class="k">WITH</span> <span class="p">(</span>
+    <span class="s1">&#39;connector&#39;</span> <span class="o">=</span> <span class="s1">&#39;jdbc&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;url&#39;</span> <span class="o">=</span> <span class="s1">&#39;jdbc:mysql://mysql:3306/flink&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;table-name&#39;</span> <span class="o">=</span> <span class="s1">&#39;category&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;username&#39;</span> <span class="o">=</span> <span class="s1">&#39;root&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;password&#39;</span> <span class="o">=</span> <span class="s1">&#39;123456&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;lookup.cache.max-rows&#39;</span> <span class="o">=</span> <span class="s1">&#39;5000&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;lookup.cache.ttl&#39;</span> <span class="o">=</span> <span class="s1">&#39;10min&#39;</span>
+<span class="p">);</span></code></pre></div>
+
+<p>The underlying JDBC connector implements the <code>LookupTableSource</code> interface, so the created JDBC table <code>category_dim</code> can be used as a temporal table (i.e. lookup table) out-of-the-box in the data enrichment.</p>
+
+<p>In addition, create an Elasticsearch table to store the category statistics.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">TABLE</span> <span class="n">top_category</span> <span class="p">(</span>
+    <span class="n">category_name</span> <span class="n">STRING</span> <span class="k">PRIMARY</span> <span class="k">KEY</span> <span class="k">NOT</span> <span class="n">ENFORCED</span><span class="p">,</span>
+    <span class="n">buy_cnt</span> <span class="nb">BIGINT</span>
+<span class="p">)</span> <span class="k">WITH</span> <span class="p">(</span>
+    <span class="s1">&#39;connector&#39;</span> <span class="o">=</span> <span class="s1">&#39;elasticsearch-7&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;hosts&#39;</span> <span class="o">=</span> <span class="s1">&#39;http://elasticsearch:9200&#39;</span><span class="p">,</span>
+    <span class="s1">&#39;index&#39;</span> <span class="o">=</span> <span class="s1">&#39;top_category&#39;</span>
+<span class="p">);</span></code></pre></div>
+
+<p>In order to enrich the category names, we use Flink SQL’s temporal table joins to join a dimension table. You can access more information about <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.11/dev/table/streaming/joins.html#join-with-a-temporal-table">temporal joins</a> in the Flink documentation.</p>
+
+<p>Additionally, we use the <code>CREATE VIEW</code> syntax to register the query as a logical view, allowing us to easily reference this query in subsequent queries and simplify nested queries. Please note that creating a logical view does not trigger the execution of the job and the view results are not persisted. Therefore, this statement is lightweight and does not have additional overhead.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">CREATE</span> <span class="k">VIEW</span> <span class="n">rich_user_behavior</span> <span class="k">AS</span>
+<span class="k">SELECT</span> <span class="n">U</span><span class="p">.</span><span class="n">user_id</span><span class="p">,</span> <span class="n">U</span><span class="p">.</span><span class="n">item_id</span><span class="p">,</span> <span class="n">U</span><span class="p">.</span><span class="n">behavior</span><span class="p">,</span> <span class="k">C</span><span class="p">.</span><span class="n">parent_category_name</span> <span class="k">as</span> <span class="n">category_name</span>
+<span class="k">FROM</span> <span class="n">user_behavior</span> <span class="k">AS</span> <span class="n">U</span> <span class="k">LEFT</span> <span class="k">JOIN</span> <span class="n">category_dim</span> <span class="k">FOR</span> <span class="n">SYSTEM_TIME</span> <span class="k">AS</span> <span class="k">OF</span> <span class="n">U</span><span class="p">.</span><span class="n">proctime</span> <span class="k">AS</span> <span class="k">C</span>
+<span class="k">ON</span> <span class="n">U</span><span class="p">.</span><span class="n">category_id</span> <span class="o">=</span> <span class="k">C</span><span class="p">.</span><span class="n">sub_category_id</span><span class="p">;</span></code></pre></div>
+
+<p>Finally, we group the dimensional table by category name to count the number of <code>buy</code> events and write the result to Elasticsearch’s <code>top_category</code> index.</p>
+
+<div class="highlight"><pre><code class="language-sql"><span class="k">INSERT</span> <span class="k">INTO</span> <span class="n">top_category</span>
+<span class="k">SELECT</span> <span class="n">category_name</span><span class="p">,</span> <span class="k">COUNT</span><span class="p">(</span><span class="o">*</span><span class="p">)</span> <span class="n">buy_cnt</span>
+<span class="k">FROM</span> <span class="n">rich_user_behavior</span>
+<span class="k">WHERE</span> <span class="n">behavior</span> <span class="o">=</span> <span class="s1">&#39;buy&#39;</span>
+<span class="k">GROUP</span> <span class="k">BY</span> <span class="n">category_name</span><span class="p">;</span></code></pre></div>
+
+<p>After submitting the query, we create a <code>top_category</code> index pattern in Kibana. We then  create a “Horizontal Bar” (bar graph) on the dashboard, by selecting the <code>top_category</code> index and drawing the category ranking according to the configuration on the left side of the following diagram before finally saving the list.</p>
+
+<center>
+<img src="/img/blog/2020-07-28-flink-sql-demo/image8.jpg" width="800px" alt="Top Categories" />
+</center>
+<p><br /></p>
+
+<p>As illustrated in the diagram, the categories of clothing and shoes exceed by far other categories on the e-commerce website.</p>
+
+<hr />
+
+<p>We have now implemented three practical applications and created charts for them. We can now return to the dashboard page and drag-and-drop each view to give our dashboard a more formal and intuitive style, as illustrated in the beginning of the blogpost. Of course, Kibana also provides a rich set of graphics and visualization features, and the user_behavior logs contain a lot more interesting information to explore. Using Flink SQL, you can analyze data in more dimensions, while usin [...]
+
+<h1 id="summary">Summary</h1>
+
+<p>In the previous sections, we described how to use Flink SQL to integrate Kafka, MySQL, Elasticsearch, and Kibana to quickly build a real-time analytics application. The entire process can be completed using standard SQL syntax, without a line of Java or Scala code. We hope that this article provides some clear and practical examples of the convenience and power of Flink SQL, featuring an easy connection to various external systems, native support for event time and out-of-order handli [...]
+
+      </article>
+    </div>
+
+    <div class="row">
+      <div id="disqus_thread"></div>
+      <script type="text/javascript">
+        /* * * CONFIGURATION VARIABLES: EDIT BEFORE PASTING INTO YOUR WEBPAGE * * */
+        var disqus_shortname = 'stratosphere-eu'; // required: replace example with your forum shortname
+
+        /* * * DON'T EDIT BELOW THIS LINE * * */
+        (function() {
+            var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
+            dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
+             (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
+        })();
+      </script>
+    </div>
+  </div>
+</div>
+      </div>
+    </div>
+
+    <hr />
+
+    <div class="row">
+      <div class="footer text-center col-sm-12">
+        <p>Copyright © 2014-2019 <a href="http://apache.org">The Apache Software Foundation</a>. All Rights Reserved.</p>
+        <p>Apache Flink, Flink®, Apache®, the squirrel logo, and the Apache feather logo are either registered trademarks or trademarks of The Apache Software Foundation.</p>
+        <p><a href="/privacy-policy.html">Privacy Policy</a> &middot; <a href="/blog/feed.xml">RSS feed</a></p>
+      </div>
+    </div>
+    </div><!-- /.container -->
+
+    <!-- Include all compiled plugins (below), or include individual files as needed -->
+    <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.matchHeight/0.7.0/jquery.matchHeight-min.js"></script>
+    <script src="/js/codetabs.js"></script>
+    <script src="/js/stickysidebar.js"></script>
+
+    <!-- Google Analytics -->
+    <script>
+      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+      })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+      ga('create', 'UA-52545728-1', 'auto');
+      ga('send', 'pageview');
+    </script>
+  </body>
+</html>
diff --git a/content/blog/page10/index.html b/content/blog/page10/index.html
index 5449026..f4bfe66 100644
--- a/content/blog/page10/index.html
+++ b/content/blog/page10/index.html
@@ -196,6 +196,21 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2016/04/22/release-1.0.2.html">Flink 1.0.2 Released</a></h2>
+
+      <p>22 Apr 2016
+      </p>
+
+      <p><p>Today, the Flink community released Flink version <strong>1.0.2</strong>, the second bugfix release of the 1.0 series.</p>
+
+</p>
+
+      <p><a href="/news/2016/04/22/release-1.0.2.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2016/04/14/flink-forward-announce.html">Flink Forward 2016 Call for Submissions Is Now Open</a></h2>
 
       <p>14 Apr 2016 by Aljoscha Krettek (<a href="https://twitter.com/">@aljoscha</a>)
@@ -323,21 +338,6 @@
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2015/11/16/release-0.10.0.html">Announcing Apache Flink 0.10.0</a></h2>
-
-      <p>16 Nov 2015
-      </p>
-
-      <p><p>The Apache Flink community is pleased to announce the availability of the 0.10.0 release. The community put significant effort into improving and extending Apache Flink since the last release, focusing on data stream processing and operational features. About 80 contributors provided bug fixes, improvements, and new features such that in total more than 400 JIRA issues could be resolved.</p>
-
-</p>
-
-      <p><a href="/news/2015/11/16/release-0.10.0.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -348,7 +348,7 @@
       
       </li>
       <li>
-        <span class="page_number ">Page: 10 of 12</span>
+        <span class="page_number ">Page: 10 of 13</span>
       </li>
       <li>
       
@@ -370,6 +370,16 @@
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page11/index.html b/content/blog/page11/index.html
index 3032706..9549b8d 100644
--- a/content/blog/page11/index.html
+++ b/content/blog/page11/index.html
@@ -196,6 +196,21 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2015/11/16/release-0.10.0.html">Announcing Apache Flink 0.10.0</a></h2>
+
+      <p>16 Nov 2015
+      </p>
+
+      <p><p>The Apache Flink community is pleased to announce the availability of the 0.10.0 release. The community put significant effort into improving and extending Apache Flink since the last release, focusing on data stream processing and operational features. About 80 contributors provided bug fixes, improvements, and new features such that in total more than 400 JIRA issues could be resolved.</p>
+
+</p>
+
+      <p><a href="/news/2015/11/16/release-0.10.0.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2015/09/16/off-heap-memory.html">Off-heap Memory in Apache Flink and the curious JIT compiler</a></h2>
 
       <p>16 Sep 2015 by Stephan Ewen (<a href="https://twitter.com/">@stephanewen</a>)
@@ -338,19 +353,6 @@ release is a preview release that contains known issues.</p>
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2015/03/13/peeking-into-Apache-Flinks-Engine-Room.html">Peeking into Apache Flink's Engine Room</a></h2>
-
-      <p>13 Mar 2015 by Fabian Hüske (<a href="https://twitter.com/">@fhueske</a>)
-      </p>
-
-      <p>Joins are prevalent operations in many data processing applications. Most data processing systems feature APIs that make joining data sets very easy. However, the internal algorithms for join processing are much more involved – especially if large data sets need to be efficiently handled. In this blog post, we cut through Apache Flink’s layered architecture and take a look at its internals with a focus on how it handles joins.</p>
-
-      <p><a href="/news/2015/03/13/peeking-into-Apache-Flinks-Engine-Room.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -361,7 +363,7 @@ release is a preview release that contains known issues.</p>
       
       </li>
       <li>
-        <span class="page_number ">Page: 11 of 12</span>
+        <span class="page_number ">Page: 11 of 13</span>
       </li>
       <li>
       
@@ -383,6 +385,16 @@ release is a preview release that contains known issues.</p>
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page12/index.html b/content/blog/page12/index.html
index 8f02812..86bbcc7 100644
--- a/content/blog/page12/index.html
+++ b/content/blog/page12/index.html
@@ -196,6 +196,19 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2015/03/13/peeking-into-Apache-Flinks-Engine-Room.html">Peeking into Apache Flink's Engine Room</a></h2>
+
+      <p>13 Mar 2015 by Fabian Hüske (<a href="https://twitter.com/">@fhueske</a>)
+      </p>
+
+      <p>Joins are prevalent operations in many data processing applications. Most data processing systems feature APIs that make joining data sets very easy. However, the internal algorithms for join processing are much more involved – especially if large data sets need to be efficiently handled. In this blog post, we cut through Apache Flink’s layered architecture and take a look at its internals with a focus on how it handles joins.</p>
+
+      <p><a href="/news/2015/03/13/peeking-into-Apache-Flinks-Engine-Room.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2015/03/02/february-2015-in-flink.html">February 2015 in the Flink community</a></h2>
 
       <p>02 Mar 2015
@@ -337,24 +350,6 @@ and offers a new API including definition of flexible windows.</p>
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2014/08/26/release-0.6.html">Apache Flink 0.6 available</a></h2>
-
-      <p>26 Aug 2014
-      </p>
-
-      <p><p>We are happy to announce the availability of Flink 0.6. This is the
-first release of the system inside the Apache Incubator and under the
-name Flink. Releases up to 0.5 were under the name Stratosphere, the
-academic and open source project that Flink originates from.</p>
-
-</p>
-
-      <p><a href="/news/2014/08/26/release-0.6.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -365,11 +360,11 @@ academic and open source project that Flink originates from.</p>
       
       </li>
       <li>
-        <span class="page_number ">Page: 12 of 12</span>
+        <span class="page_number ">Page: 12 of 13</span>
       </li>
       <li>
       
-        <span>Next</span>
+        <a href="/blog/page13" class="next">Next</a>
       
       </li>
     </ul>
@@ -387,6 +382,16 @@ academic and open source project that Flink originates from.</p>
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page6/index.html b/content/blog/page13/index.html
similarity index 86%
copy from content/blog/page6/index.html
copy to content/blog/page13/index.html
index cc72a8f..9ab6bc9 100644
--- a/content/blog/page6/index.html
+++ b/content/blog/page13/index.html
@@ -196,153 +196,19 @@
     <!-- Blog posts -->
     
     <article>
-      <h2 class="blog-title"><a href="/news/2018/12/22/release-1.6.3.html">Apache Flink 1.6.3 Released</a></h2>
+      <h2 class="blog-title"><a href="/news/2014/08/26/release-0.6.html">Apache Flink 0.6 available</a></h2>
 
-      <p>22 Dec 2018
+      <p>26 Aug 2014
       </p>
 
-      <p><p>The Apache Flink community released the third bugfix version of the Apache Flink 1.6 series.</p>
+      <p><p>We are happy to announce the availability of Flink 0.6. This is the
+first release of the system inside the Apache Incubator and under the
+name Flink. Releases up to 0.5 were under the name Stratosphere, the
+academic and open source project that Flink originates from.</p>
 
 </p>
 
-      <p><a href="/news/2018/12/22/release-1.6.3.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/12/21/release-1.7.1.html">Apache Flink 1.7.1 Released</a></h2>
-
-      <p>21 Dec 2018
-      </p>
-
-      <p><p>The Apache Flink community released the first bugfix version of the Apache Flink 1.7 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/12/21/release-1.7.1.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/11/30/release-1.7.0.html">Apache Flink 1.7.0 Release Announcement</a></h2>
-
-      <p>30 Nov 2018
-       Till Rohrmann (<a href="https://twitter.com/stsffap">@stsffap</a>)</p>
-
-      <p><p>The Apache Flink community is pleased to announce Apache Flink 1.7.0. 
-The latest release includes more than 420 resolved issues and some exciting additions to Flink that we describe in the following sections of this post. 
-Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12315522&amp;version=12343585">complete changelog</a> for more details.</p>
-
-</p>
-
-      <p><a href="/news/2018/11/30/release-1.7.0.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/10/29/release-1.6.2.html">Apache Flink 1.6.2 Released</a></h2>
-
-      <p>29 Oct 2018
-      </p>
-
-      <p><p>The Apache Flink community released the second bugfix version of the Apache Flink 1.6 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/10/29/release-1.6.2.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/10/29/release-1.5.5.html">Apache Flink 1.5.5 Released</a></h2>
-
-      <p>29 Oct 2018
-      </p>
-
-      <p><p>The Apache Flink community released the fifth bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/10/29/release-1.5.5.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/09/20/release-1.6.1.html">Apache Flink 1.6.1 Released</a></h2>
-
-      <p>20 Sep 2018
-      </p>
-
-      <p><p>The Apache Flink community released the first bugfix version of the Apache Flink 1.6 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/09/20/release-1.6.1.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/09/20/release-1.5.4.html">Apache Flink 1.5.4 Released</a></h2>
-
-      <p>20 Sep 2018
-      </p>
-
-      <p><p>The Apache Flink community released the fourth bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/09/20/release-1.5.4.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/08/21/release-1.5.3.html">Apache Flink 1.5.3 Released</a></h2>
-
-      <p>21 Aug 2018
-      </p>
-
-      <p><p>The Apache Flink community released the third bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/08/21/release-1.5.3.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/08/09/release-1.6.0.html">Apache Flink 1.6.0 Release Announcement</a></h2>
-
-      <p>09 Aug 2018
-       Till Rohrmann (<a href="https://twitter.com/stsffap">@stsffap</a>)</p>
-
-      <p><p>The Apache Flink community is proud to announce the 1.6.0 release. Over the past 2 months, the Flink community has worked hard to resolve more than 360 issues. Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12315522&amp;version=12342760">complete changelog</a> for more details.</p>
-
-</p>
-
-      <p><a href="/news/2018/08/09/release-1.6.0.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/07/31/release-1.5.2.html">Apache Flink 1.5.2 Released</a></h2>
-
-      <p>31 Jul 2018
-      </p>
-
-      <p><p>The Apache Flink community released the second bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/07/31/release-1.5.2.html">Continue reading &raquo;</a></p>
+      <p><a href="/news/2014/08/26/release-0.6.html">Continue reading &raquo;</a></p>
     </article>
 
     <hr>
@@ -353,15 +219,15 @@ Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa
     <ul class="pager">
       <li>
       
-        <a href="/blog/page5" class="previous">Previous</a>
+        <a href="/blog/page12" class="previous">Previous</a>
       
       </li>
       <li>
-        <span class="page_number ">Page: 6 of 12</span>
+        <span class="page_number ">Page: 13 of 13</span>
       </li>
       <li>
       
-        <a href="/blog/page7" class="next">Next</a>
+        <span>Next</span>
       
       </li>
     </ul>
@@ -379,6 +245,16 @@ Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page2/index.html b/content/blog/page2/index.html
index 9225782..611a819 100644
--- a/content/blog/page2/index.html
+++ b/content/blog/page2/index.html
@@ -196,6 +196,19 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2020/05/07/community-update.html">Flink Community Update - May'20</a></h2>
+
+      <p>07 May 2020
+       Marta Paes (<a href="https://twitter.com/morsapaes">@morsapaes</a>)</p>
+
+      <p>Can you smell it? It’s release month! This time around, we’re warming up for Flink 1.11 and peeping back to the past month in the Flink community — with the release of Stateful Functions 2.0, a new self-paced Flink training and some efforts to improve the Flink documentation experience.</p>
+
+      <p><a href="/news/2020/05/07/community-update.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2020/05/04/season-of-docs.html">Applying to Google Season of Docs 2020</a></h2>
 
       <p>04 May 2020
@@ -319,19 +332,6 @@ This release marks a big milestone: Stateful Functions 2.0 is not only an API up
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/ecosystem/2020/02/22/apache-beam-how-beam-runs-on-top-of-flink.html">Apache Beam: How Beam Runs on Top of Flink</a></h2>
-
-      <p>22 Feb 2020
-       Maximilian Michels (<a href="https://twitter.com/stadtlegende">@stadtlegende</a>) &amp; Markos Sfikas (<a href="https://twitter.com/MarkSfik">@MarkSfik</a>)</p>
-
-      <p>This blog post discusses the reasons to use Flink together with Beam for your stream processing needs and takes a closer look at how Flink works with Beam under the hood.</p>
-
-      <p><a href="/ecosystem/2020/02/22/apache-beam-how-beam-runs-on-top-of-flink.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -342,7 +342,7 @@ This release marks a big milestone: Stateful Functions 2.0 is not only an API up
       
       </li>
       <li>
-        <span class="page_number ">Page: 2 of 12</span>
+        <span class="page_number ">Page: 2 of 13</span>
       </li>
       <li>
       
@@ -364,6 +364,16 @@ This release marks a big milestone: Stateful Functions 2.0 is not only an API up
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page3/index.html b/content/blog/page3/index.html
index b64acf1..f50a4d6 100644
--- a/content/blog/page3/index.html
+++ b/content/blog/page3/index.html
@@ -196,6 +196,19 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/ecosystem/2020/02/22/apache-beam-how-beam-runs-on-top-of-flink.html">Apache Beam: How Beam Runs on Top of Flink</a></h2>
+
+      <p>22 Feb 2020
+       Maximilian Michels (<a href="https://twitter.com/stadtlegende">@stadtlegende</a>) &amp; Markos Sfikas (<a href="https://twitter.com/MarkSfik">@MarkSfik</a>)</p>
+
+      <p>This blog post discusses the reasons to use Flink together with Beam for your stream processing needs and takes a closer look at how Flink works with Beam under the hood.</p>
+
+      <p><a href="/ecosystem/2020/02/22/apache-beam-how-beam-runs-on-top-of-flink.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2020/02/20/ddl.html">No Java Required: Configuring Sources and Sinks in SQL</a></h2>
 
       <p>20 Feb 2020
@@ -318,21 +331,6 @@
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2019/10/18/release-1.9.1.html">Apache Flink 1.9.1 Released</a></h2>
-
-      <p>18 Oct 2019
-       Jark Wu (<a href="https://twitter.com/JarkWu">@JarkWu</a>)</p>
-
-      <p><p>The Apache Flink community released the first bugfix version of the Apache Flink 1.9 series.</p>
-
-</p>
-
-      <p><a href="/news/2019/10/18/release-1.9.1.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -343,7 +341,7 @@
       
       </li>
       <li>
-        <span class="page_number ">Page: 3 of 12</span>
+        <span class="page_number ">Page: 3 of 13</span>
       </li>
       <li>
       
@@ -365,6 +363,16 @@
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page4/index.html b/content/blog/page4/index.html
index 9991b85..5da42bd 100644
--- a/content/blog/page4/index.html
+++ b/content/blog/page4/index.html
@@ -196,6 +196,21 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2019/10/18/release-1.9.1.html">Apache Flink 1.9.1 Released</a></h2>
+
+      <p>18 Oct 2019
+       Jark Wu (<a href="https://twitter.com/JarkWu">@JarkWu</a>)</p>
+
+      <p><p>The Apache Flink community released the first bugfix version of the Apache Flink 1.9 series.</p>
+
+</p>
+
+      <p><a href="/news/2019/10/18/release-1.9.1.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/feature/2019/09/13/state-processor-api.html">The State Processor API: How to Read, write and modify the state of Flink applications</a></h2>
 
       <p>13 Sep 2019
@@ -319,19 +334,6 @@
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/2019/05/14/temporal-tables.html">Flux capacitor, huh? Temporal Tables and Joins in Streaming SQL</a></h2>
-
-      <p>14 May 2019
-       Marta Paes (<a href="https://twitter.com/morsapaes">@morsapaes</a>)</p>
-
-      <p>Apache Flink natively supports temporal table joins since the 1.7 release for straightforward temporal data handling. In this blog post, we provide an overview of how this new concept can be leveraged for effective point-in-time analysis in streaming scenarios.</p>
-
-      <p><a href="/2019/05/14/temporal-tables.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -342,7 +344,7 @@
       
       </li>
       <li>
-        <span class="page_number ">Page: 4 of 12</span>
+        <span class="page_number ">Page: 4 of 13</span>
       </li>
       <li>
       
@@ -364,6 +366,16 @@
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page5/index.html b/content/blog/page5/index.html
index e015d58..24f0dc1 100644
--- a/content/blog/page5/index.html
+++ b/content/blog/page5/index.html
@@ -196,6 +196,19 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/2019/05/14/temporal-tables.html">Flux capacitor, huh? Temporal Tables and Joins in Streaming SQL</a></h2>
+
+      <p>14 May 2019
+       Marta Paes (<a href="https://twitter.com/morsapaes">@morsapaes</a>)</p>
+
+      <p>Apache Flink natively supports temporal table joins since the 1.7 release for straightforward temporal data handling. In this blog post, we provide an overview of how this new concept can be leveraged for effective point-in-time analysis in streaming scenarios.</p>
+
+      <p><a href="/2019/05/14/temporal-tables.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/2019/05/03/pulsar-flink.html">When Flink & Pulsar Come Together</a></h2>
 
       <p>03 May 2019
@@ -324,21 +337,6 @@ for more details.</p>
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/12/26/release-1.5.6.html">Apache Flink 1.5.6 Released</a></h2>
-
-      <p>26 Dec 2018
-      </p>
-
-      <p><p>The Apache Flink community released the sixth and last bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/12/26/release-1.5.6.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -349,7 +347,7 @@ for more details.</p>
       
       </li>
       <li>
-        <span class="page_number ">Page: 5 of 12</span>
+        <span class="page_number ">Page: 5 of 13</span>
       </li>
       <li>
       
@@ -371,6 +369,16 @@ for more details.</p>
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page6/index.html b/content/blog/page6/index.html
index cc72a8f..30bef6a 100644
--- a/content/blog/page6/index.html
+++ b/content/blog/page6/index.html
@@ -196,6 +196,21 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2018/12/26/release-1.5.6.html">Apache Flink 1.5.6 Released</a></h2>
+
+      <p>26 Dec 2018
+      </p>
+
+      <p><p>The Apache Flink community released the sixth and last bugfix version of the Apache Flink 1.5 series.</p>
+
+</p>
+
+      <p><a href="/news/2018/12/26/release-1.5.6.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2018/12/22/release-1.6.3.html">Apache Flink 1.6.3 Released</a></h2>
 
       <p>22 Dec 2018
@@ -332,21 +347,6 @@ Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2018/07/31/release-1.5.2.html">Apache Flink 1.5.2 Released</a></h2>
-
-      <p>31 Jul 2018
-      </p>
-
-      <p><p>The Apache Flink community released the second bugfix version of the Apache Flink 1.5 series.</p>
-
-</p>
-
-      <p><a href="/news/2018/07/31/release-1.5.2.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -357,7 +357,7 @@ Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa
       
       </li>
       <li>
-        <span class="page_number ">Page: 6 of 12</span>
+        <span class="page_number ">Page: 6 of 13</span>
       </li>
       <li>
       
@@ -379,6 +379,16 @@ Please check the <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page7/index.html b/content/blog/page7/index.html
index d651e9e..cadaba9 100644
--- a/content/blog/page7/index.html
+++ b/content/blog/page7/index.html
@@ -196,6 +196,21 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2018/07/31/release-1.5.2.html">Apache Flink 1.5.2 Released</a></h2>
+
+      <p>31 Jul 2018
+      </p>
+
+      <p><p>The Apache Flink community released the second bugfix version of the Apache Flink 1.5 series.</p>
+
+</p>
+
+      <p><a href="/news/2018/07/31/release-1.5.2.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2018/07/12/release-1.5.1.html">Apache Flink 1.5.1 Released</a></h2>
 
       <p>12 Jul 2018
@@ -326,24 +341,6 @@ for more detail.</p>
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2017/11/22/release-1.4-and-1.5-timeline.html">Looking Ahead to Apache Flink 1.4.0 and 1.5.0</a></h2>
-
-      <p>22 Nov 2017
-       Stephan Ewen (<a href="https://twitter.com/StephanEwen">@StephanEwen</a>), Aljoscha Krettek (<a href="https://twitter.com/aljoscha">@aljoscha</a>), &amp; Mike Winters (<a href="https://twitter.com/wints">@wints</a>)</p>
-
-      <p><p>The Apache Flink 1.4.0 release is on track to happen in the next couple of weeks, and for all of the
-readers out there who haven’t been following the release discussion on <a href="http://flink.apache.org/community.html#mailing-lists">Flink’s developer mailing
-list</a>, we’d like to provide some details on
-what’s coming in Flink 1.4.0 as well as a preview of what the Flink community will save for 1.5.0.</p>
-
-</p>
-
-      <p><a href="/news/2017/11/22/release-1.4-and-1.5-timeline.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -354,7 +351,7 @@ what’s coming in Flink 1.4.0 as well as a preview of what the Flink community
       
       </li>
       <li>
-        <span class="page_number ">Page: 7 of 12</span>
+        <span class="page_number ">Page: 7 of 13</span>
       </li>
       <li>
       
@@ -376,6 +373,16 @@ what’s coming in Flink 1.4.0 as well as a preview of what the Flink community
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page8/index.html b/content/blog/page8/index.html
index 2135f92..73821a5 100644
--- a/content/blog/page8/index.html
+++ b/content/blog/page8/index.html
@@ -196,6 +196,24 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2017/11/22/release-1.4-and-1.5-timeline.html">Looking Ahead to Apache Flink 1.4.0 and 1.5.0</a></h2>
+
+      <p>22 Nov 2017
+       Stephan Ewen (<a href="https://twitter.com/StephanEwen">@StephanEwen</a>), Aljoscha Krettek (<a href="https://twitter.com/aljoscha">@aljoscha</a>), &amp; Mike Winters (<a href="https://twitter.com/wints">@wints</a>)</p>
+
+      <p><p>The Apache Flink 1.4.0 release is on track to happen in the next couple of weeks, and for all of the
+readers out there who haven’t been following the release discussion on <a href="http://flink.apache.org/community.html#mailing-lists">Flink’s developer mailing
+list</a>, we’d like to provide some details on
+what’s coming in Flink 1.4.0 as well as a preview of what the Flink community will save for 1.5.0.</p>
+
+</p>
+
+      <p><a href="/news/2017/11/22/release-1.4-and-1.5-timeline.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2017/08/05/release-1.3.2.html">Apache Flink 1.3.2 Released</a></h2>
 
       <p>05 Aug 2017
@@ -325,19 +343,6 @@
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2017/02/06/release-1.2.0.html">Announcing Apache Flink 1.2.0</a></h2>
-
-      <p>06 Feb 2017 by Robert Metzger
-      </p>
-
-      <p><p>The Apache Flink community is excited to announce the 1.2.0 release.</p></p>
-
-      <p><a href="/news/2017/02/06/release-1.2.0.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -348,7 +353,7 @@
       
       </li>
       <li>
-        <span class="page_number ">Page: 8 of 12</span>
+        <span class="page_number ">Page: 8 of 13</span>
       </li>
       <li>
       
@@ -370,6 +375,16 @@
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>
 
       
diff --git a/content/blog/page9/index.html b/content/blog/page9/index.html
index 5b4d424..9f4f978 100644
--- a/content/blog/page9/index.html
+++ b/content/blog/page9/index.html
@@ -196,6 +196,19 @@
     <!-- Blog posts -->
     
     <article>
+      <h2 class="blog-title"><a href="/news/2017/02/06/release-1.2.0.html">Announcing Apache Flink 1.2.0</a></h2>
+
+      <p>06 Feb 2017 by Robert Metzger
+      </p>
+
+      <p><p>The Apache Flink community is excited to announce the 1.2.0 release.</p></p>
+
+      <p><a href="/news/2017/02/06/release-1.2.0.html">Continue reading &raquo;</a></p>
+    </article>
+
+    <hr>
+    
+    <article>
       <h2 class="blog-title"><a href="/news/2016/12/21/release-1.1.4.html">Apache Flink 1.1.4 Released</a></h2>
 
       <p>21 Dec 2016
@@ -327,21 +340,6 @@
 
     <hr>
     
-    <article>
-      <h2 class="blog-title"><a href="/news/2016/04/22/release-1.0.2.html">Flink 1.0.2 Released</a></h2>
-
-      <p>22 Apr 2016
-      </p>
-
-      <p><p>Today, the Flink community released Flink version <strong>1.0.2</strong>, the second bugfix release of the 1.0 series.</p>
-
-</p>
-
-      <p><a href="/news/2016/04/22/release-1.0.2.html">Continue reading &raquo;</a></p>
-    </article>
-
-    <hr>
-    
 
     <!-- Pagination links -->
     
@@ -352,7 +350,7 @@
       
       </li>
       <li>
-        <span class="page_number ">Page: 9 of 12</span>
+        <span class="page_number ">Page: 9 of 13</span>
       </li>
       <li>
       
@@ -374,6 +372,16 @@
 
     <ul id="markdown-toc">
       
+      <li><a href="/2020/07/28/flink-sql-demo-building-e2e-streaming-application.html">Flink SQL Demo: Building an End-to-End Streaming Application</a></li>
+
+      
+        
+      
+    
+      
+      
+
+      
       <li><a href="/2020/07/23/catalogs.html">Sharing is caring - Catalogs in Flink SQL</a></li>