Posted to commits@streampipes.apache.org by ri...@apache.org on 2021/05/04 09:28:39 UTC

[incubator-streampipes-website] branch dev updated: [STREAMPIPES-197] Release new docs version 0.67.0

This is an automated email from the ASF dual-hosted git repository.

riemer pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampipes-website.git


The following commit(s) were added to refs/heads/dev by this push:
     new e13cfef  [STREAMPIPES-197] Release new docs version 0.67.0
e13cfef is described below

commit e13cfefaeeb91e5ce1b0859b98e0ecdb5abd9ef9
Author: Dominik Riemer <ri...@fzi.de>
AuthorDate: Tue May 4 11:28:25 2021 +0200

    [STREAMPIPES-197] Release new docs version 0.67.0
---
 documentation/website/i18n/en.json                 | 160 +++++++
 .../version-0.67.0/01_try-installation.md          | 140 ++++++
 .../version-0.67.0/01_try-overview.md              | 111 +++++
 .../version-0.67.0/01_try-tutorial.md              |  21 +
 .../version-0.67.0/02_concepts-adapter.md          |   8 +
 .../version-0.67.0/02_concepts-data-streams.md     |   8 +
 .../version-0.67.0/02_concepts-glossary.md         |   8 +
 .../version-0.67.0/02_concepts-overview.md         |  36 ++
 .../version-0.67.0/02_concepts-pipeline.md         |   8 +
 .../version-0.67.0/03_use-configurations.md        |   8 +
 .../version-0.67.0/03_use-connect.md               |  69 +++
 .../version-0.67.0/03_use-dashboard.md             |  67 +++
 .../version-0.67.0/03_use-data-explorer.md         |  19 +
 .../version-0.67.0/03_use-managing-pipelines.md    |  53 +++
 .../version-0.67.0/03_use-notifications.md         |  26 ++
 .../version-0.67.0/03_use-pipeline-editor.md       |  65 +++
 .../version-0.67.0/05_deploy-docker.md             |  74 +++
 .../version-0.67.0/05_deploy-kubernetes.md         |  61 +++
 .../version-0.67.0/05_deploy-use-ssl.md            |  36 ++
 .../version-0.67.0/06_extend-archetypes.md         | 144 ++++++
 .../versioned_docs/version-0.67.0/06_extend-cli.md | 187 ++++++++
 .../version-0.67.0/06_extend-sdk-event-model.md    | 142 ++++++
 .../06_extend-sdk-output-strategies.md             | 347 ++++++++++++++
 .../06_extend-sdk-static-properties.md             | 265 +++++++++++
 .../06_extend-sdk-stream-requirements.md           | 179 ++++++++
 .../version-0.67.0/06_extend-setup.md              |  32 ++
 .../06_extend-tutorial-data-processors.md          | 500 +++++++++++++++++++++
 .../06_extend-tutorial-data-sinks.md               | 247 ++++++++++
 .../06_extend-tutorial-data-sources.md             | 283 ++++++++++++
 .../version-0.67.0/07_technicals-architecture.md   |  63 +++
 .../version-0.67.0/07_technicals-configuration.md  |  59 +++
 .../version-0.67.0/07_technicals-messaging.md      |   8 +
 .../07_technicals-runtime-wrappers.md              |   8 +
 .../version-0.67.0/07_technicals-user-guidance.md  |   8 +
 .../versioned_docs/version-0.67.0/08_debugging.md  |   8 +
 .../versioned_docs/version-0.67.0/08_monitoring.md |   8 +
 .../versioned_docs/version-0.67.0/09_contribute.md |  18 +
 .../versioned_docs/version-0.67.0/09_get-help.md   |  27 ++
 .../version-0.67.0/dev-guide-archetype.md          |   7 +
 .../version-0.67.0/user-guide-installation.md      | 121 +++++
 .../version-0.67.0/user-guide-introduction.md      |  62 +++
 .../version-0.67.0-sidebars.json                   | 221 +++++++++
 documentation/website/versions.json                |   1 +
 43 files changed, 3923 insertions(+)

diff --git a/documentation/website/i18n/en.json b/documentation/website/i18n/en.json
index 560afbe..9fa27af 100644
--- a/documentation/website/i18n/en.json
+++ b/documentation/website/i18n/en.json
@@ -1980,6 +1980,166 @@
       "version-0.66.0/version-0.66.0-user-guide-installation": {
         "title": "Installation",
         "sidebar_label": "Installation"
+      },
+      "version-0.67.0/version-0.67.0-try-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.67.0/version-0.67.0-user-guide-introduction": {
+        "title": "Apache StreamPipes Documentation",
+        "sidebar_label": "Overview"
+      },
+      "version-0.67.0/version-0.67.0-try-tutorial": {
+        "title": "Interactive Tutorial",
+        "sidebar_label": "Interactive Tutorial"
+      },
+      "version-0.67.0/version-0.67.0-concepts-adapter": {
+        "title": "Data Adapters",
+        "sidebar_label": "Data Adapters"
+      },
+      "version-0.67.0/version-0.67.0-concepts-data-streams": {
+        "title": "Data Streams",
+        "sidebar_label": "Data Streams"
+      },
+      "version-0.67.0/version-0.67.0-concepts-glossary": {
+        "title": "Glossary",
+        "sidebar_label": "Glossary"
+      },
+      "version-0.67.0/version-0.67.0-concepts-overview": {
+        "title": "StreamPipes Concepts",
+        "sidebar_label": "Overview"
+      },
+      "version-0.67.0/version-0.67.0-concepts-pipelines": {
+        "title": "Pipelines",
+        "sidebar_label": "Pipelines"
+      },
+      "version-0.67.0/version-0.67.0-use-configurations": {
+        "title": "Configurations",
+        "sidebar_label": "Configurations"
+      },
+      "version-0.67.0/version-0.67.0-use-connect": {
+        "title": "StreamPipes Connect",
+        "sidebar_label": "StreamPipes Connect"
+      },
+      "version-0.67.0/version-0.67.0-use-dashboard": {
+        "title": "Live Dashboard",
+        "sidebar_label": "Live Dashboard"
+      },
+      "version-0.67.0/version-0.67.0-use-data-explorer": {
+        "title": "Data Explorer",
+        "sidebar_label": "Data Explorer"
+      },
+      "version-0.67.0/version-0.67.0-use-managing-pipelines": {
+        "title": "Managing Pipelines",
+        "sidebar_label": "Managing Pipelines"
+      },
+      "version-0.67.0/version-0.67.0-use-notifications": {
+        "title": "Notifications",
+        "sidebar_label": "Notifications"
+      },
+      "version-0.67.0/version-0.67.0-use-pipeline-editor": {
+        "title": "Pipeline Editor",
+        "sidebar_label": "Pipeline Editor"
+      },
+      "version-0.67.0/version-0.67.0-deploy-docker": {
+        "title": "Docker Deployment",
+        "sidebar_label": "Docker Deployment"
+      },
+      "version-0.67.0/version-0.67.0-deploy-kubernetes": {
+        "title": "Kubernetes Deployment",
+        "sidebar_label": "Kubernetes Deployment"
+      },
+      "version-0.67.0/version-0.67.0-deploy-use-ssl": {
+        "title": "Use SSL",
+        "sidebar_label": "Use SSL"
+      },
+      "version-0.67.0/version-0.67.0-extend-archetypes": {
+        "title": "Maven Archetypes",
+        "sidebar_label": "Maven Archetypes"
+      },
+      "version-0.67.0/version-0.67.0-extend-cli": {
+        "title": "StreamPipes CLI",
+        "sidebar_label": "StreamPipes CLI"
+      },
+      "version-0.67.0/version-0.67.0-extend-sdk-event-model": {
+        "title": "SDK Guide: Event Model",
+        "sidebar_label": "SDK: Event Model"
+      },
+      "version-0.67.0/version-0.67.0-extend-sdk-output-strategies": {
+        "title": "SDK Guide: Output Strategies",
+        "sidebar_label": "SDK: Output Strategies"
+      },
+      "version-0.67.0/version-0.67.0-extend-sdk-static-properties": {
+        "title": "SDK Guide: Static Properties",
+        "sidebar_label": "SDK: Static Properties"
+      },
+      "version-0.67.0/version-0.67.0-extend-sdk-stream-requirements": {
+        "title": "SDK Guide: Stream Requirements",
+        "sidebar_label": "SDK: Stream Requirements"
+      },
+      "version-0.67.0/version-0.67.0-extend-setup": {
+        "title": "Development Setup",
+        "sidebar_label": "Development Setup"
+      },
+      "version-0.67.0/version-0.67.0-extend-tutorial-data-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.67.0/version-0.67.0-extend-tutorial-data-sinks": {
+        "title": "Tutorial: Data Sinks",
+        "sidebar_label": "Tutorial: Data Sinks"
+      },
+      "version-0.67.0/version-0.67.0-extend-tutorial-data-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      },
+      "version-0.67.0/version-0.67.0-technicals-architecture": {
+        "title": "Architecture",
+        "sidebar_label": "Architecture"
+      },
+      "version-0.67.0/version-0.67.0-technicals-configuration": {
+        "title": "Configuration",
+        "sidebar_label": "Configuration"
+      },
+      "version-0.67.0/version-0.67.0-technicals-messaging": {
+        "title": "Messaging",
+        "sidebar_label": "Messaging"
+      },
+      "version-0.67.0/version-0.67.0-technicals-runtime-wrappers": {
+        "title": "Runtime Wrappers",
+        "sidebar_label": "Runtime Wrappers"
+      },
+      "version-0.67.0/version-0.67.0-technicals-user-guidance": {
+        "title": "User Guidance",
+        "sidebar_label": "User Guidance"
+      },
+      "version-0.67.0/version-0.67.0-debugging-debugging": {
+        "title": "Debugging",
+        "sidebar_label": "Debugging"
+      },
+      "version-0.67.0/version-0.67.0-debugging-monitoring": {
+        "title": "Monitoring",
+        "sidebar_label": "Monitoring"
+      },
+      "version-0.67.0/version-0.67.0-community-contribute": {
+        "title": "Contribute",
+        "sidebar_label": "Contribute"
+      },
+      "version-0.67.0/version-0.67.0-community-get-help": {
+        "title": "Get Help",
+        "sidebar_label": "Get Help"
+      },
+      "version-0.67.0/version-0.67.0-dev-guide-archetype": {
+        "title": "Start Developing",
+        "sidebar_label": "Start Developing"
+      },
+      "version-0.67.0/version-0.67.0-user-guide-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.67.0/version-0.67.0-user-guide-introduction-old": {
+        "title": "Introduction",
+        "sidebar_label": "Introduction"
       }
     },
     "links": {
diff --git a/documentation/website/versioned_docs/version-0.67.0/01_try-installation.md b/documentation/website/versioned_docs/version-0.67.0/01_try-installation.md
new file mode 100644
index 0000000..4efc410
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/01_try-installation.md
@@ -0,0 +1,140 @@
+---
+id: version-0.67.0-try-installation
+title: Installation
+sidebar_label: Installation
+original_id: try-installation
+---
+
+The easiest way to install StreamPipes is our Docker-based installation. For production-grade deployments, we also
+recommend looking at our Kubernetes support, which is also part of the installation kit.
+
+## Prerequisites
+
+The Docker-based installation requires **Docker** and **Docker Compose** to be installed on the target machine.
+Installation instructions can be found below.
+
+<div class="admonition info">
+<div class="admonition-title">Install Docker</div>
+<p>Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure Docker can be started as a non-root user (described in the installation manual, don’t forget to log out and in again) and check that Docker is installed correctly by executing <code>docker run hello-world</code>.</p>
+</div>
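+
+To verify the setup, you can run the following commands in a terminal (a minimal sketch; the exact output differs per platform):
+
+```bash
+# confirm that Docker works for a non-root user by running a throwaway test container
+docker run hello-world
+
+# confirm that Docker Compose is available
+docker-compose --version
+```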
+
+<div class="admonition info">
+<div class="admonition-title">Configure Docker</div>
+<p>By default, Docker uses only a limited number of CPU cores and memory.
+       If you run StreamPipes on Windows or on a Mac, you need to adjust the default settings.
+       To do that, click on the Docker icon in your tab bar and open the preferences.
+       Go to the advanced preferences and set the **number of CPUs to 6** (recommended) and the **memory to 4 GB**.
+       After changing the settings, Docker needs to be restarted.</p></div>
+
+### Supported operating systems
+
+The Docker-based installation supports the operating systems **Linux**, **Mac OS X** and **Windows 10**. Older Windows
+versions are not fully compatible with Docker. Linux VMs running under Windows might cause network problems with Docker;
+therefore, some manual work might be needed to make StreamPipes run properly.
+
+### Web Browser
+
+The StreamPipes application itself will be accessible through a web browser. We recommend a recent version of Chrome (
+best experience), Firefox or Edge.
+
+## Install StreamPipes
+
+<ul style="padding-left:0">
+  <li class="installation-step" style="margin-top:20px;">
+      <div class="wrapper-container" style="">
+          <div class="wrapper-step">
+              <span class="fa-stack fa-2x">
+                   <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                   <strong class="fa-stack-1x" style="color:white;">1</strong>
+              </span>
+          </div>
+          <div class="wrapper-instruction">
+              Download the latest Apache StreamPipes release and extract the zip file to a directory of your choice.
+                  <table class="table" style="margin-top:30px;">
+                      <thead>
+                      <tr style="background:rgb(27, 20, 100);color:white;">
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">File</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Version</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Release Date</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Signatures</th>
+                      </tr>
+                      </thead>
+                      <tbody>
+                      <tr>
+                          <td><a href="https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=incubator/streampipes/installer/0.67.0/apache-streampipes-installer-0.67.0-incubating-source-release.zip">apache-streampipes-installer-0.67.0-incubating-source-release.zip</a></td>
+                          <td>0.67.0</td>
+                          <td>2020-10-27</td>
+                          <td>
+                              <a href="https://downloads.apache.org/incubator/streampipes/installer/0.67.0/apache-streampipes-installer-0.67.0-incubating-source-release.zip.sha512">SHA</a>
+                              <a href="https://downloads.apache.org/incubator/streampipes/installer/0.67.0/apache-streampipes-installer-0.67.0-incubating-source-release.zip.asc">PGP</a>
+                          </td>
+                      </tr>
+                      </tbody>
+                  </table>
+              <div class="row">
+                  <div class="alert alert-info" role="alert">
+                    The above release file should be verified using the PGP signatures and the <a href="https://downloads.apache.org/incubator/streampipes/KEYS">project release KEYS</a>. See the official ASF <a target="asf" href="https://www.apache.org/dyn/closer.cgi#verify">verification instructions</a> for a description of using the PGP and KEYS files for verification. A SHA512 checksum is also provided as an additional verification method.
+                  </div>
+              </div>
+          </div>
+      </div>
+  </li>
+  <li class="installation-step">
+      <div class="wrapper-container">
+          <div class="wrapper-step">
+              <span class="fa-stack fa-2x">
+                   <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                   <strong class="fa-stack-1x" style="color:white;">2</strong>
+              </span>
+          </div>
+          <div class="wrapper-instruction">
+             <div style="margin-bottom:5px;">In a command prompt, open the folder <code>compose</code> and run <code>docker-compose up -d</code> (see also the command sketch below this list).
+                <div class="row" style="margin-top:10px;">              
+                    <div class="alert alert-info" role="alert">The folder contains two docker-compose files: The standard file contains the so-called <i>lite</i> installation, which includes fewer pipeline elements, but also requires less powerful hardware. The <i>full</i> version also includes the Apache Flink wrapper and more pipeline elements, but requires more memory. See the Github <a href="https://github.com/apache/incubator-streampipes-installer/tree/master/compose">READM [...]
+            </div>
+            </div>
+        </div>
+    </div>
+  </li>
+    <li class="installation-step">
+        <div class="wrapper-container" style="align-items: center;justify-content: center;">
+            <div class="wrapper-step">
+                <span class="fa-stack fa-2x">
+                     <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                     <strong class="fa-stack-1x" style="color:white;">3</strong>
+                </span>
+            </div>
+            <div class="wrapper-instruction">
+                Open your browser, navigate to http://localhost:80 (or the domain name of your server) and finish the setup according to the instructions below.
+            </div>
+        </div>
+    </li>
+</ul>
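+
+For reference, the steps above can be summarized as a short shell sketch (the name of the extracted folder is an assumption derived from the release file name and may differ on your system):
+
+```bash
+# step 1 (assumption): the release zip has been extracted into the current directory
+cd apache-streampipes-installer-0.67.0-incubating-source-release/compose
+
+# step 2: start StreamPipes in the background
+docker-compose up -d
+
+# step 3: open http://localhost in your browser and finish the setup
+```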
+
+## Setup StreamPipes
+
+Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below. To set up
+the system, enter an email address and a password and click on install. After the installation has finished and all components
+are successfully configured, continue by clicking on "Go to login page".
+
+On the login page, enter your credentials, then you should be forwarded to the home page.
+
+Congratulations! You've successfully managed to install StreamPipes. Now we're ready to build our first pipeline!
+
+<div class="my-carousel docs-carousel">
+    <img src="/docs/img/01_try-installation/01_register_user.png" alt="Set Up User">
+    <img src="/docs/img/01_try-installation/02_user_set_up.png" alt="SetUp StreamPipes Components">
+    <img src="/docs/img/01_try-installation/03_login.png" alt="Go to login page">
+    <img src="/docs/img/01_try-installation/04_home.png" alt="Home page">
+</div>
+
+<div class="admonition error">
+<div class="admonition-title">Errors during the installation process</div>
+<p>In most cases, errors during the installation are due to an under-powered system.<br/>
+If there is a problem with any of the components, please restart the whole system (run <code>docker-compose down</code> and, if necessary, also delete the data volumes).
+   Please also make sure that your system meets the hardware requirements as mentioned in the first section of the installation guide.</p>
+</div>
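+
+A full reset can be performed with the following commands (a sketch; it assumes they are run from the `compose` folder and that deleting the persisted data volumes is acceptable):
+
+```bash
+# stop StreamPipes and remove all mapped data volumes
+docker-compose down -v
+
+# start again with a clean state
+docker-compose up -d
+```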
+
+## Next Steps
+
+That's it! To ease your first steps with StreamPipes, we've created an [interactive tutorial](try-tutorial).
diff --git a/documentation/website/versioned_docs/version-0.67.0/01_try-overview.md b/documentation/website/versioned_docs/version-0.67.0/01_try-overview.md
new file mode 100644
index 0000000..0327d5c
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/01_try-overview.md
@@ -0,0 +1,111 @@
+---
+id: version-0.67.0-user-guide-introduction
+title: Apache StreamPipes Documentation
+sidebar_label: Overview
+original_id: user-guide-introduction
+---
+
+This is the documentation of Apache StreamPipes.
+
+<img class="docs-image docs-image-small" src="/docs/img/01_try-overview/01_streampipes-overview.png" alt="StreamPipes Overview">
+
+
+<div class="container grid col-3">
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸš€ Try
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Your first steps with Apache StreamPipes.</b></div>
+               <a href="try-installation">Install StreamPipes</a>, <a href="try-tutorial">Interactive Tutorial</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸ’‘ Concepts
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Learn about some general concepts of StreamPipes.</b></div>
+                <a href="concepts-overview">Overview</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸŽ“ Use
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Learn how to use the various modules of StreamPipes.</b></div>
+                <a href="use-connect">StreamPipes Connect</a>, <a href="use-pipeline-editor">Pipeline Editor</a>, <a href="use-managing-pipelines">Managing Pipelines</a>, 
+                <a href="use-dashboard">Live Dashboard</a>, <a href="use-data-explorer">Data Explorer</a>, <a href="use-notifications">Notifications</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸ“š Pipeline Elements
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Available pipeline elements in StreamPipes.</b></div>
+                <a href="pe/org.apache.streampipes.connect.protocol.stream.kafka">Adapters</a>, 
+                <a href="pe/org.apache.streampipes.processors.aggregation.flink.aggregation">Data Processors</a>, 
+                <a href="pe/org.apache.streampipes.sinks.databases.jvm.couchdb">Data Sinks</a> 
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                ⚑ Deploy 
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>How to set up StreamPipes in test and production environments.</b></div>
+                <a href="deploy-docker">Docker</a>, <a href="deploy-kubernetes">Kubernetes</a>, <a href="deploy-use-ssl">Use SSL</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸ’» Extend
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Write your own pipeline elements for StreamPipes.</b></div>
+                <a href="extend-setup">Development Setup</a>, <a href="extend-cli">CLI</a>, <a href="extend-archetypes">Maven Archetypes</a>,
+                <a href="extend-tutorial-data-sources">Tutorial Data Sources</a>, <a href="extend-tutorial-data-processors">Tutorial Data Processors</a>, <a href="extend-tutorial-data-sinks">Tutorial Data Sinks</a>,
+                <a href="extend-sdk-event-model">Event Model</a>, <a href="extend-sdk-stream-requirements">Stream Requirements</a>, <a href="extend-sdk-static-properties">Static Properties</a>,
+                <a href="extend-sdk-output-strategies">Output Strategies</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸ”§ Technicals
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Learn about technical concepts behind the curtain.</b></div>
+                <a href="technicals-architecture">Architecture</a>, <a href="technicals-user-guidance">User Guidance</a>, <a href="technicals-runtime-wrappers">Runtime Wrappers</a>,
+                <a href="technicals-messaging">Messaging</a>, <a href="technicals-configuration">Configuration</a>
+            </div>
+        </div>
+    </div>
+    <div class="column">
+        <div class="toc-box">
+            <div class="toc-header border-thin">
+                πŸ‘ͺ Community
+            </div>
+            <div class="toc-content">
+                <div class="toc-section-header"><b>Get support and learn how to contribute to StreamPipes.</b></div>
+                <a href="community-get-help">Get Help</a>, <a href="community-contribute">Contribute</a>
+            </div>
+        </div>
+    </div>
+</div>
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/01_try-tutorial.md b/documentation/website/versioned_docs/version-0.67.0/01_try-tutorial.md
new file mode 100644
index 0000000..19a3bb1
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/01_try-tutorial.md
@@ -0,0 +1,21 @@
+---
+id: version-0.67.0-try-tutorial
+title: Interactive Tutorial
+sidebar_label: Interactive Tutorial
+original_id: try-tutorial
+---
+
+Once you've installed StreamPipes and see the home screen, you'll see a number of modules that are part of the StreamPipes toolbox.
+As a first step, you might be interested in taking the interactive tutorial that helps you create your first pipeline.
+Switch to the **Pipeline Editor** and you will see a dialog that asks you for the start of the interactive tutorial:
+
+<img class="docs-image" src="/docs/img/01_try-tutorial/01_tutorial-welcome.png" alt="Tutorial Welcome Page">
+
+Click **Start Tour** to start the tour. In this tour, you'll build a simple pipeline that monitors (simulated) live data from a water tank system.
+Within the tour, perform the actions as recommended and click **Next** to trigger the next steps. Some tour steps won't require you to select **Next**, but will instead wait for you to take the recommended action.
+You can cancel the tour anytime by clicking the **Exit Tour** button.
+
+<img class="docs-image" src="/docs/img/01_try-tutorial/02_tutorial-process.png" alt="Tutorial Welcome Page">
+
+Now that you've built your first pipeline, you might be interested in reading about some of our core [concepts](concepts-overview).
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/02_concepts-adapter.md b/documentation/website/versioned_docs/version-0.67.0/02_concepts-adapter.md
new file mode 100644
index 0000000..0d77db5
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/02_concepts-adapter.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-concepts-adapter
+title: Data Adapters
+sidebar_label: Data Adapters
+original_id: concepts-adapter
+---
+
+tbd
diff --git a/documentation/website/versioned_docs/version-0.67.0/02_concepts-data-streams.md b/documentation/website/versioned_docs/version-0.67.0/02_concepts-data-streams.md
new file mode 100644
index 0000000..2de5c80
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/02_concepts-data-streams.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-concepts-data-streams
+title: Data Streams
+sidebar_label: Data Streams
+original_id: concepts-data-streams
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/02_concepts-glossary.md b/documentation/website/versioned_docs/version-0.67.0/02_concepts-glossary.md
new file mode 100644
index 0000000..d241643
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/02_concepts-glossary.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-concepts-glossary
+title: Glossary
+sidebar_label: Glossary
+original_id: concepts-glossary
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/02_concepts-overview.md b/documentation/website/versioned_docs/version-0.67.0/02_concepts-overview.md
new file mode 100644
index 0000000..a20abe9
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/02_concepts-overview.md
@@ -0,0 +1,36 @@
+---
+id: version-0.67.0-concepts-overview
+title: StreamPipes Concepts
+sidebar_label: Overview
+original_id: concepts-overview
+---
+
+To understand how StreamPipes works, the knowledge of a few core concepts, illustrated below, will be helpful.
+
+<img class="docs-image" src="/docs/img/02_concepts-overview/01_overview.png" alt="Overview of concepts">
+
+## Adapter
+An adapter connects to any external data source and forwards received events to the internal StreamPipes system. Within StreamPipes, the output of adapters is available in the form of the two primary building blocks **Data Set** and **Data Stream**.
+Adapters can be either created by using StreamPipes Connect, a module to easily connect to new data sources directly from the user interface, or by defining an adapter using the provided Software Development Kit (SDK).
+
+## Data Set / Data Stream
+**Data Streams** and **Data Sets** represent the primary source for working with events in StreamPipes.
+A stream is an ordered sequence of events, where an event typically consists of one or more observation values and additional metadata. The "structure" (or schema) of an event provided by a data stream or set is stored in the internal semantic schema registry of StreamPipes.
+While data streams are typically unbounded, data sets have a fixed end and are internally "replayed" by the system from beginning to end once they are used as part of a pipeline.
+In the following, although we mostly refer to data streams, most concepts also apply to data sets.
+
+## Data Processor
+**Data Processors** in StreamPipes transform one or more input data streams into an output data stream.
+Such transformations can be rather simple, e.g., filtering based on a predefined rule, or more complex, e.g., applying rule-based or learning-based algorithms on the data.
+Data Processors can be applied to any data stream that matches the input requirements of a processor. In addition, most processors can be configured by providing user-defined parameters directly in the user interface.
+Processing elements define stream requirements, a set of minimum properties that an incoming event stream must provide. Data processors can keep state or perform stateless operations.
+At runtime, data streams are processed by using one of the underlying runtime wrappers (see the developer guide for more details).
+
+## Data Sink
+**Data Sinks** consume event streams similar to Data Processors, but do not provide an output data stream. As such, data sinks typically perform some action or trigger a visualization as a result of a stream transformation.
+Similar to data processors, sinks also define specific input requirements that any connected data stream must fulfill, and they can be customized.
+StreamPipes provides several internal data sinks, e.g., to create notifications, visualize live data or persist historical data of incoming streams. In addition, various data sinks are provided to forward data streams to external systems such as databases.
+
+## Pipeline
+A pipeline in Apache StreamPipes describes the transformation process from a data stream to a data sink. Typically, a pipeline consists of at least one data stream (or data set), zero or more data processors and at least one data sink.
+Pipelines are built by users in a graphical way using the **Pipeline Editor** and can be started and stopped at any time.
diff --git a/documentation/website/versioned_docs/version-0.67.0/02_concepts-pipeline.md b/documentation/website/versioned_docs/version-0.67.0/02_concepts-pipeline.md
new file mode 100644
index 0000000..56b972b
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/02_concepts-pipeline.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-concepts-pipelines
+title: Pipelines
+sidebar_label: Pipelines
+original_id: concepts-pipelines
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-configurations.md b/documentation/website/versioned_docs/version-0.67.0/03_use-configurations.md
new file mode 100644
index 0000000..ed63600
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-configurations.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-use-configurations
+title: Configurations
+sidebar_label: Configurations
+original_id: use-configurations
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-connect.md b/documentation/website/versioned_docs/version-0.67.0/03_use-connect.md
new file mode 100644
index 0000000..bf18ddc
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-connect.md
@@ -0,0 +1,69 @@
+---
+id: version-0.67.0-use-connect
+title: StreamPipes Connect
+sidebar_label: StreamPipes Connect
+original_id: use-connect
+---
+
+StreamPipes Connect is the module to connect external data sources with Apache StreamPipes directly from the user interface. 
+StreamPipes Connect offers various adapters for common communication protocols and some specific sensors. Besides connecting data, StreamPipes Connect offers ways to pre-process data without the need to build pipelines and integrates a schema guesser that listens for incoming data and recommends the recognized event schema.
+
+The screenshot below illustrates the data marketplace, which is the start screen of StreamPipes Connect.
+
+<img class="docs-image" src="/docs/img/03_use-connect/01_connect-overview.png" alt="StreamPipes Connect Overview">
+
+## Connecting new data sources
+
+### Data Marketplace
+The data marketplace shows a list of all adapters that are currently installed in Apache StreamPipes. Each adapter offers various configuration options which depend on the specifics of the adapter.
+Adapters are distinguished a) by the data source concept they provide (data set or data stream) and b) by the adapter type, where we distinguish between _generic adapters_, which usually implement a generic communication protocol such as MQTT or Apache Kafka, and _specific adapters_, which target a specific sensor interface (e.g., for Netio power sockets).
+Several filter options are available to find a suitable adapter. The configuration of a new adapter starts with selecting one of the available adapters, which opens an assistant that guides you through the adapter generation.
+
+### Protocol/Basic Settings
+In the first step, basic configurations need to be provided. For instance, for an Apache PLC4X adapter, this includes the IP address of the PLC. After all values have been entered, the "Next" button opens the next step.
+
+### Format Specification
+The next step, format specification, is only available for generic adapters which support different message formats to be sent over the corresponding protocol. Think of a message broker that is able to consume messages in either JSON or a binary format.
+Currently supported formats include XML, various JSON representations, images and CSV. After a format has been selected, further format configurations can be provided (depending on the selected format) to further customize the incoming message format.
+
+<img class="docs-image" src="/docs/img/03_use-connect/02_customize-format.png" alt="StreamPipes Connect Format Selection">
+
+### Schema Editor
+In the next step, based on the previously provided protocol and format settings, the system will either provide the fixed/pre-defined schema of the adapter or, in case of specific adapters, will connect to the underlying system and try to listen for incoming data. After a few seconds, the schema editor will appear that provides a list of detected fields from the incoming events (the schema).
+
+<img class="docs-image" src="/docs/img/03_use-connect/03_schema-editor.png" alt="StreamPipes Connect Schema Editor">
+
+In the toolbar, several configuration options are available which transform the original schema:
+
+* **Add Nested Property**. This option allows you to modify the structure of the event by creating a nested structure. The schema can be simply changed by dragging and dropping fields into the nested structure.
+* **Add Static Value**. This option allows you to add a field containing a static value (e.g., an identifier) to the event.
+* **Add Timestamp**. This option appends the current timestamp to each incoming event, useful in case the timestamp is not provided by the origin.
+* **Refresh**. Re-triggers the schema guessing.
+* **Delete field**. Select one or more fields by clicking the checkbox on the right and trigger the delete button.
+* **Property scope**. For each field, a property scope can be defined which is either _Measurement_, _Dimension_ or _Header_. These values are later used in the pipeline editor to assist in configuring pipeline elements and do not have any functional consequence.
+Use _Measurement_ to indicate the field measures a value (e.g., a temperature value from a sensor), use _Dimension_ for any identifier (e.g., the sensor ID) and use _Header_ for any other metadata such as timestamps.
+
+For each field (also called event property) of the schema, additional configuration options are available by clicking the _Edit_ button:
+
+* **Label**. Used to provide a human-readable label for the field, which will ease the identification of fields when building pipelines.
+* **Runtime Name.** This is the identifier of the field in the underlying message representation format (e.g., the JSON key). Renaming the runtime name will trigger a so-called _transformation rule_ which renames the incoming field name to the new field name before forwarding it to StreamPipes.
+* **Domain Property/Semantic Type**. To help StreamPipes better understand the value which is represented by the field, semantic type information can be given. As of StreamPipes 0.68.0, the semantic type can be selected from a wide range of available options. Additionally, a URL can be manually provided that indicates the meaning of the value (e.g., http://schema.org/Temperature).
+* **Mark as Timestamp**. Indicates that the selected value represents a timestamp. When selected, a _timestamp converter_ can be configured which will convert incoming timestamps to the UNIX timestamp.
+* **Runtime Type**. Here, the data type of the field can be changed.
+* **Unit**. Allows you to specify the unit in which the value is measured. Once selected, you can also automatically convert the unit to a target unit, which will then be inserted into the data stream produced by the adapter (see screenshot below).
+
+<img class="docs-image" src="/docs/img/03_use-connect/04_schema-editor-conversion.png" alt="StreamPipes Connect Unit Conversion">
+
+### Adapter Generation
+Finally, the adapter is ready to be started. In the _Adapter Generation_ page, a name and description for the resulting data stream must be provided.
+Once started, StreamPipes creates your new adapter and displays a preview of the connected data, which refreshes about once per second.
+Afterwards, the newly created data stream is available in the pipeline editor for further usage.
+
+<img class="docs-image" src="/docs/img/03_use-connect/05_adapter-generation.png" alt="StreamPipes Connect Adapter Generation">
+
+## Managing adapters
+
+Currently running adapters are available in the "Running adapters" section of StreamPipes Connect. Existing adapters can be stopped and deleted. Currently, there is no mechanism to edit an existing adapter or to stop the adapter without deleting it.
+
+### Adapter Templates
+For frequently used configurations, adapter templates can be created. An adapter template is a pre-configured adapter which can be further customized by users. Created adapter templates are available in the marketplace similar to standard adapters.
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-dashboard.md b/documentation/website/versioned_docs/version-0.67.0/03_use-dashboard.md
new file mode 100644
index 0000000..c427b69
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-dashboard.md
@@ -0,0 +1,67 @@
+---
+id: version-0.67.0-use-dashboard
+title: Live Dashboard
+sidebar_label: Live Dashboard
+original_id: use-dashboard
+---
+
+The live dashboard can be used to visualize live data of data streams using a set of visualizations.
+The entry page of the live dashboard lists all created dashboards, as shown in the screenshot below:
+
+<img class="docs-image" src="/docs/img/03_use-dashboard/01_dashboard-overview.png" alt="StreamPipes Dashboard Overview">
+
+## Visualizing Data Streams
+
+To visualize data streams in the live dashboard, a pipeline must be created that makes use of the so-called **Dashboard Sink**.
+Any data stream or data processor can serve as an input of the dashboard sink. Switch to the pipeline editor, create a pipeline and configure the dashboard sink. The visualization name is used to identify the sink in case multiple dashboard sinks are used within a single pipeline.
+
+## Managing Dashboards
+Multiple dashboards can be created, e.g., to organize different assets in a single dashboard view.
+
+A new dashboard can be created by clicking the _New Dashboard_ button, which opens a dialog that requires basic dashboard settings such as the title and description of the new dashboard.
+Once created, the dashboard will be shown in the overview. Here, the following dashboard actions are available:
+
+* **Show** opens the dashboard.
+* **Window** opens the dashboard in a new window with reduced controls, e.g., without the StreamPipes navigation and toolbar. This is a useful view for standalone displays that should visualize key parameters.
+* **Settings** allows you to modify the basic dashboard settings.
+* **Edit** opens the dashboard in edit mode, where widgets can be added to the dashboard.
+* **Delete** deletes the selected dashboard.
+
+## Creating Visualizations
+
+Visualizations can be added to each dashboard in the form of widgets. To add new visualizations, switch to the dashboard in _Edit_ mode.
+In edit mode, a button appears that allows you to add a new visualization.
+
+Adding a new visualization is supported by a wizard consisting of three steps:
+
+<img class="docs-image" src="/docs/img/03_use-dashboard/02_add-widget.png" alt="StreamPipes Dashboard Pipeline Selection">
+
+* **Select pipeline** is the first step where a pipeline is selected on which the visualization is based. In this view, all pipelines are listed that have at least one **Dashboard Sink**. In case a pipeline contains multiple dashboard sinks, the visualization name is listed below the pipeline name, which eases discovery of the proper visualization.
+* **Select widget** is the next step where the visualization widget must be selected. StreamPipes automatically filters this list based on input requirements of widgets. For instance, image visualizations are only visible if the input data stream provides an image object.
+* **Configure widget** provides widget-specific settings to configure the visualization. In most cases, colors and titles of widgets can be modified. Additionally, chart-specific settings such as axis value ranges can be configured.
+
+<img class="docs-image" src="/docs/img/03_use-dashboard/03_configure-widget.png" alt="StreamPipes Dashboard Widget Configuration">
+
+By clicking _Create_, the new widget is placed on the canvas. Size and positioning of visualizations can be flexibly changed based on the provided grid. To change the widget configuration, the _Settings_ button of each widget can be clicked to re-open the configuration dialog.
+
+Once created, the dashboard provides a live view of all visualizations:
+
+<img class="docs-image" src="/docs/img/03_use-dashboard/04_live-dashboard.png" alt="StreamPipes Live Dashboard">
+
+
+Before the dashboard is closed, make sure to click the _Save_ button to persist the updated dashboard. Changes can be discarded by clicking the _Discard_ button.
+
+
+## Available widgets
+
+The following visualizations are available in the latest release:
+
+* Area Chart
+* Gauge
+* HTML page (renders HTML markup)
+* Image  
+* Line Chart
+* Raw (displays the raw JSON input for debugging purposes)
+* Single Value (displays a single measurement)
+* Table
+* Traffic Light
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-data-explorer.md b/documentation/website/versioned_docs/version-0.67.0/03_use-data-explorer.md
new file mode 100644
index 0000000..fa71c35
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-data-explorer.md
@@ -0,0 +1,19 @@
+---
+id: version-0.67.0-use-data-explorer
+title: Data Explorer
+sidebar_label: Data Explorer
+original_id: use-data-explorer
+---
+
+The data explorer can be used to visualize and explore data streams that are persisted by using the **Data Lake** sink.
+
+<img class="docs-image" src="/docs/img/03_use-data-explorer/01_data-explorer-overview.png" alt="StreamPipes Data Explorer Overview">
+
+It provides a canvas where various visualizations from multiple pipelines can be placed. A global date and time range can be selected for all displayed visualizations.
+
+The data explorer is currently available as an early beta version and features are still subject to change. A more detailed documentation will be available once the data explorer is available in a stable version.
+
+## Using the data explorer
+
+Any pipeline that uses the so-called **Data Lake** sink can be explored in the data explorer. Switch to the pipeline editor and add the data lake sink to a data processor or stream.
+The sink requires an index name as a configuration parameter, which is used as an identifier in the data explorer.
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-managing-pipelines.md b/documentation/website/versioned_docs/version-0.67.0/03_use-managing-pipelines.md
new file mode 100644
index 0000000..3f68dd2
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-managing-pipelines.md
@@ -0,0 +1,53 @@
+---
+id: version-0.67.0-use-managing-pipelines
+title: Managing Pipelines
+sidebar_label: Managing Pipelines
+original_id: use-managing-pipelines
+---
+
+The pipeline view lists all created pipelines and provides several views and actions to manage the lifecycle of pipelines.
+
+In the entry screen, an overview of all created pipelines is shown:
+
+<img class="docs-image" src="/docs/img/03_use-managing-pipelines/01_pipeline-overview.png" alt="StreamPipes Pipeline Overview">
+
+## Pipeline Actions
+Within the pipeline overview, for each pipeline several actions are available:
+* **Start/Stop pipeline** Starts or stops the selected pipeline. Once clicked, StreamPipes will trigger the selected action for all pipeline elements and open a success or error dialog as illustrated below.
+* **Show details** opens the pipeline detail view (see below).  
+* **Modify pipeline** opens the pipeline in the pipeline editor, where the pipeline can be modified. Note that this button is only visible if the pipeline is not running.
+* **Delete pipeline** opens a confirm dialog, which subsequently deletes the selected pipeline.
+
+The screenshot below shows the status of a pipeline after it has been successfully started. By clicking the _Show details_ button, more information on the status of each corresponding pipeline element microservice becomes available. In case of failures, the failure reason will be shown for each pipeline element that has failed to start.
+
+<img class="docs-image" src="/docs/img/03_use-managing-pipelines/02_pipeline-start-dialog.png" alt="StreamPipes Pipeline Start Dialog">
+
+## Organizing Pipelines into Categories
+Pipelines can be organized into categories, which is a useful feature in case a larger number of pipelines is created.
+All categories will be shown as separate tabs in the pipeline overview. The same pipeline can be assigned to multiple categories.
+
+To add a new category or to add a new pipeline to an existing category, click the _Manage Categories_ button and configure the category and its assigned pipelines in the dialog.
+
+## Pipeline Details
+The pipeline details view can be opened by clicking the _Show details_ button in the pipeline overview panel.
+
+<img class="docs-image" src="/docs/img/03_use-managing-pipelines/03_pipeline-details.png" alt="StreamPipes Pipeline Details">
+
+### Overview
+The overview section displays the graphical structure of the pipeline and provides some statistics about recent pipeline actions. Additionally, pipelines can be directly started, stopped, modified and deleted within this view.
+
+### Monitoring
+Monitoring features will become available in version 0.68.0.
+
+### Errors
+Monitoring of failures and logs will become available in version 0.69.0.
+
+### QuickEdit
+The quick edit feature (only available for pipelines that are not running) is a quick and convenient way to modify some pipeline element configurations without opening the pipeline in the pipeline editor.
+To use the quick edit feature, switch to the _QuickEdit_ tab, which will display the selected pipeline.
+
+By clicking a pipeline element from the preview canvas, available configuration options of the selected pipeline element can be modified. Note that only modifications that do not affect the pipeline structure can be made; changes that would alter the structure, such as selecting a different output stream, are not possible here.
+
+<img class="docs-image" src="/docs/img/03_use-managing-pipelines/04_pipeline-quick-edit.png" alt="StreamPipes Pipeline Quick Edit">
+
+After a configuration value has been changed, make sure to click the _Update Pipeline_ button to save the changes.
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-notifications.md b/documentation/website/versioned_docs/version-0.67.0/03_use-notifications.md
new file mode 100644
index 0000000..024fefb
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-notifications.md
@@ -0,0 +1,26 @@
+---
+id: version-0.67.0-use-notifications
+title: Notifications
+sidebar_label: Notifications
+original_id: use-notifications
+---
+
+The notification module can be used to create internal notifications.
+
+<img class="docs-image" src="/docs/img/03_use-notifications/01_notifications-overview.png" alt="StreamPipes Notifications">
+
+## Using notifications
+
+Any pipeline that includes the data sink **Notification** can trigger notifications that appear in the notification view. To configure a new notification, switch to the pipeline editor and append the notification sink to a data processor or data stream.
+The sink requires a title and message as configuration parameters.
+
+### Placeholders
+
+The notification message can include placeholders for fields which are replaced with the actual value at runtime.
+
+## Managing notifications
+
+The notification view is split into two parts. The left side lists all pipelines which include a notification sink. By selecting a pipeline, available notifications will be shown in the right panel.
+By scrolling up, older notifications become visible. Notifications that have appeared in the detail view will be automatically marked as read, so that only new, unread notifications will appear in the left toolbar.
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/03_use-pipeline-editor.md b/documentation/website/versioned_docs/version-0.67.0/03_use-pipeline-editor.md
new file mode 100644
index 0000000..780568a
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/03_use-pipeline-editor.md
@@ -0,0 +1,65 @@
+---
+id: version-0.67.0-use-pipeline-editor
+title: Pipeline Editor
+sidebar_label: Pipeline Editor
+original_id: use-pipeline-editor
+---
+
+The pipeline editor module supports building pipelines that transform a data stream using a set of reusable data processors and data sinks.
+The empty pipeline editor looks similar to the illustration below after a new installation.
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/01_pipeline-editor-overview.png" alt="StreamPipes Pipeline Editor Overview">
+
+## Pipeline Elements
+The four main concepts (data sets, data streams, data processors and data sinks) are available at the top of the pipeline editor. By switching the tabs, the individual pipeline elements for each category can be found.
+By clicking the question mark symbol, which appears when hovering over an element, additional information can be viewed (e.g., a live preview of incoming data for data streams, and the documentation of the pipeline element for data processors and sinks).
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/02_pipeline-element-info.png" alt="StreamPipes Pipeline Element Info">
+
+## Creating Pipelines
+Pipelines are built by dragging data streams, processors and sinks into the pipeline assembly area. Typically, a pipeline is built step by step, starting with a data source (stream or set).
+Afterwards, data processors and sinks are added to the pipeline. Connections between pipeline elements are made by selecting the gray connector of the source and moving it to the target pipeline element.
+Once a connection is made, StreamPipes performs a quick validation step and, in case two pipeline elements are compatible, automatically opens a configuration window.
+
+### Configuring Pipeline Elements
+The configuration depends on the selected pipeline element and looks similar to the screenshot below.
+In general, pipeline elements are configured by providing the required values. Once the pipeline element is fully configured, the _Save_ button activates and can be used to save the configuration for the pipeline element.
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/03_configure-pipeline-element.png" alt="StreamPipes Pipeline Element Configuration">
+
+In addition, the following options are available in the pipeline element configuration menu:
+* **Show documentation** extends the view and displays the pipeline element's documentation next to the configuration view.
+* **Show only recommended settings** filters the list of available fields provided by the connected input data stream based on the _property scope_, e.g., so that only measurement values are displayed and dimension fields from the input stream are not available for selection. If deactivated, selections contain the full list of available fields that match the input requirement of the data processor.
+
+### Pipeline Element Options
+Further options for a pipeline element can be displayed by hovering over a pipeline element in the assembly area, so that additional buttons appear around the pipeline element:
+
+* **Configure element** re-opens the configuration view to update the pipeline element configuration (only available for data processors and sinks)
+* **Delete element** removes the pipeline element from the pipeline  
+* **Help** opens the pipeline element's documentation
+* **Compatible element** opens a dialog which shows all pipeline elements that are compatible with the current element's output data stream. The dialog offers an alternative to selecting pipeline elements directly from the pipeline element selection at the top.
+* **Pipeline Element Recommendation** opens a dialog which shows all recommended pipeline elements that are compatible with the current element's output data stream. The recommendation is based on previously connected pipeline elements and is displayed below.
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/04_pipeline-element-recommendation.png" alt="StreamPipes Pipeline Element Recommendation">
+
+### Pipeline Editor Options
+Several pipeline editor options are available in the menu bar of the pipeline assembly:
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/05_pipeline-editor-options.png" alt="StreamPipes Pipeline Editor Options">
+
+* **Save pipeline** opens the save dialog (see below)
+* **Pan** allows you to pan within the assembly area, which is useful for larger pipelines that do not fit on the screen
+* **Select** is visible if pan mode is active and switches back to the default select mode
+* **Zoom in/out** triggers the zoom in the pipeline assembly
+* **Auto Layout** lays out the pipeline in a much more beautiful way than you are able to do by yourself ;-)
+* **All pipeline modifications saved** is displayed if the current pipeline has been cached. Cache updates are triggered after every change of the pipeline so that changes are not lost after reloading the window.
+* **Hints** are shown to display current errors (e.g., incomplete pipelines). Details can be opened by clicking the hint button.
+* **Clear assembly** clears the assembly and removes the current pipeline.
+
+### Saving a pipeline
+To save a pipeline, press the _save pipeline_ button. A dialog pops up where a name and description of the pipeline can be entered (only the name is mandatory).
+Additionally, a pipeline can be directly started after it has been stored by checking the corresponding button.
+
+<img class="docs-image" src="/docs/img/03_use-pipeline-editor/06_save-pipeline.png" alt="StreamPipes Save Pipeline Dialog">
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/05_deploy-docker.md b/documentation/website/versioned_docs/version-0.67.0/05_deploy-docker.md
new file mode 100644
index 0000000..4626350
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/05_deploy-docker.md
@@ -0,0 +1,74 @@
+---
+id: version-0.67.0-deploy-docker
+title: Docker Deployment
+sidebar_label: Docker Deployment
+original_id: deploy-docker
+---
+
+StreamPipes Compose is a simple collection of user-friendly `docker-compose` files that lets you easily gain first-hand experience with Apache StreamPipes.
+
+> **NOTE**: We recommend using StreamPipes Compose only for an initial try-out and testing. If you are a developer and want to develop new pipeline elements or core features, use the [StreamPipes CLI](../cli).
+
+#### TL;DR: A one-liner to rule them all :-)
+
+```bash
+docker-compose up -d
+```
+Go to http://localhost to finish the installation in the browser. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline!
+
+## Prerequisites
+* Docker >= 17.06.0
+* Docker-Compose >= 1.17.0 (Compose file format: 3.4)
+* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge
+
+Tested on: **macOS, Linux, Windows 10** (CMD, PowerShell, GitBash)
+
+**macOS** and **Windows 10** (Pro, Enterprise, Education) users can easily get Docker and Docker-Compose on their systems by installing **Docker for Mac/Windows** (recommended).
+
+> **NOTE**: On purpose, we disabled all port mappings except for HTTP port **80**, which is used to access the StreamPipes UI, in order to keep the surface for conflicting ports minimal.
+
+## Usage
+We provide two options to get you going:
+
+- **default**: a light-weight option with few pipeline elements, needs less memory
+- **full**:  contains more pipeline elements, requires **>16 GB RAM** (recommended)
+
+**Starting** the **default** option is as easy as simply running:
+> **NOTE**: Starting might take a while since `docker-compose up` also initially pulls all Docker images from Dockerhub.
+
+```bash
+docker-compose up -d
+# after all services are started, go to http://localhost
+```
+After all containers have started successfully, just go to your browser and visit http://localhost to finish the installation. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline!
+
+**Stopping** the **default** option is similarly easy:
+```bash
+docker-compose down
+# if you want to remove mapped data volumes, run:
+# docker-compose down -v
+```
+
+Starting the **full** option is almost the same, just specify the `docker-compose.full.yml` file:
+```bash
+docker-compose -f docker-compose.full.yml up -d
+# after all services are started, go to http://localhost
+```
+Stopping the **full** option:
+```bash
+docker-compose -f docker-compose.full.yml down
+#docker-compose -f docker-compose.full.yml down -v
+```
+
+## Update services
+To actively pull the latest available Docker images use:
+```bash
+docker-compose pull
+# docker-compose -f docker-compose.full.yml pull
+```
+
+## Upgrade
+To upgrade to another StreamPipes version, simply edit the `SP_VERSION` in the `.env` file.
+```
+SP_VERSION=<VERSION>
+```
diff --git a/documentation/website/versioned_docs/version-0.67.0/05_deploy-kubernetes.md b/documentation/website/versioned_docs/version-0.67.0/05_deploy-kubernetes.md
new file mode 100644
index 0000000..36a55b2
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/05_deploy-kubernetes.md
@@ -0,0 +1,61 @@
+---
+id: version-0.67.0-deploy-kubernetes
+title: Kubernetes Deployment
+sidebar_label: Kubernetes Deployment
+original_id: deploy-kubernetes
+---
+
+## Prerequisites
+Requires Helm (https://helm.sh/) and an active connection to a kubernetes cluster with a running tiller server.
+
+Tested with:
+* K3s v1.18.8+k3s1 (6b595318) with K8s v1.18.8
+* Helm v3.1.2
+
+## Usage
+We provide two helm chart options to get you going:
+
+- **default**: a light-weight option with few pipeline elements, needs less memory
+- **full**:  contains more pipeline elements, requires **>16 GB RAM** (recommended)
+
+**Starting** the **default** helm chart option is as easy as simply running the following command from the root of this folder:
+> **NOTE**: Starting might take a while since we also initially pull all Docker images from Dockerhub.
+
+```bash
+helm install streampipes ./
+```
+After a while, all containers should have started successfully, indicated by the `Running` status.
+```bash
+kubectl get pods
+NAME                                           READY   STATUS    RESTARTS   AGE
+activemq-66d58f47cf-2r2nb                      1/1     Running   0          3m27s
+backend-76ddc486c8-nswpc                       1/1     Running   0          3m27s
+connect-master-7b477f9b79-8dfvr                1/1     Running   0          3m26s
+connect-worker-78d89c989c-9v8zs                1/1     Running   0          3m27s
+consul-55965f966b-gwb7l                        1/1     Running   0          3m27s
+couchdb-77db98cf7b-xnnvb                       1/1     Running   0          3m27s
+influxdb-b95b6479-r8wh8                        1/1     Running   0          3m27s
+kafka-657b5fb77-dp2d6                          1/1     Running   0          3m27s
+pipeline-elements-all-jvm-79c445dbd9-m8xcs     1/1     Running   0          3m27s
+sources-watertank-simulator-6c6b8844f6-6b4d7   1/1     Running   0          3m27s
+ui-b94bd9766-rm6zb                             2/2     Running   0          3m27s
+zookeeper-5d9947686f-6nzgs                     1/1     Running   0          3m26s
+```
+
+After all containers have started successfully, just go to your browser and visit any of the k8s cluster nodes on
+`http://<NODE_IP>` to finish the installation.
+
+> **NOTE**: If you're running Docker for Mac or Docker for Windows with a local k8s cluster, the above step to use your host IP might not work. Luckily, you can port-forward a service port to your localhost using the following command to be able to access the UI either via `http://localhost` or `http://<HOST_IP>` (you require sudo to run this command in order to bind to a privileged port).
+```bash
+kubectl port-forward svc/ui --address=0.0.0.0 80:80
+```
+
+Starting the **full** helm chart option is almost the same:
+```bash
+helm install streampipes ./ --set deployment=full
+```
+
+**Deleting** the current helm chart deployment:
+```bash
+helm del streampipes
+```
diff --git a/documentation/website/versioned_docs/version-0.67.0/05_deploy-use-ssl.md b/documentation/website/versioned_docs/version-0.67.0/05_deploy-use-ssl.md
new file mode 100644
index 0000000..0d225ca
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/05_deploy-use-ssl.md
@@ -0,0 +1,36 @@
+---
+id: version-0.67.0-deploy-use-ssl
+title: Use SSL
+sidebar_label: Use SSL
+original_id: deploy-use-ssl
+---
+
+This page explains how SSL certificates can be used to provide transport layer security between your browser and the StreamPipes backend.
+
+## Prerequisites
+You need a valid certificate consisting of a private and a public key. Both keys must be in PEM format. Please note that your private key should never be shared, otherwise the communication cannot be considered secure.
+
+## Edit docker-compose.yml
+In order to use SSL, you have to open port 443 on the nginx service. Incoming insecure traffic on port 80 will automatically be rerouted to port 443.
+
+The environment variable `NGINX_SSL` must be set to "true".
+
+Finally, you have to inject the certificates into the Docker container. In the example below, the certificates are placed in the directory /etc/ssl/private/ on the host machine. Please change the path according to the place where the certificates are located on your machine. The path after the colon should not be changed!
+```yaml
+[...]
+  nginx:
+    image: apachestreampipes/ui
+    ports:
+      - "80:80"
+      - "443:443"
+    environment:
+      - NGINX_SSL=true
+    volumes:
+      - /etc/ssl/private/private.pem:/etc/nginx/ssl/ssl.pem
+      - /etc/ssl/private/public.pem:/etc/nginx/ssl/cert.pem
+    depends_on:
+      - backend
+    networks:
+      spnet:
+[...]
+```
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-archetypes.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-archetypes.md
new file mode 100644
index 0000000..98955a3
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-archetypes.md
@@ -0,0 +1,144 @@
+---
+id: version-0.67.0-extend-archetypes
+title: Maven Archetypes
+sidebar_label: Maven Archetypes
+original_id: extend-archetypes
+---
+
+In this tutorial we explain how you can use the Maven archetypes to develop your own StreamPipes processors and sinks.
+We use IntelliJ in this tutorial, but it works with any IDE of your choice.
+
+## Prerequisites
+You need to have Maven installed; furthermore, you need an up-and-running StreamPipes installation on your development computer.
+To ease the configuration of environment variables, we use the IntelliJ [env Plugin](https://plugins.jetbrains.com/plugin/7861-envfile).
+Install it in IntelliJ. Development also works without the plugin; in that case, you have to set the environment variables manually instead of using the env configuration file.
+
+## Create Project
+To create a new project, we provide multiple Maven archetypes.
+Currently, we have archetypes for the JVM and Flink wrappers, each for processors and sinks.
+The commands required to create a new pipeline element project can be found below. Make sure that you select a version compatible with your StreamPipes installation.
+Copy the command into your terminal to create a new project.
+The project will be created in the current folder.
+First, the ``groupId`` of the resulting Maven artifact must be set.
+We use ``groupId``: ``org.example`` and ``artifactId``: ``ExampleProcessor``.
+You can keep the default values for the other settings, confirm them by hitting enter.
+Now, a new folder with the name ``ExampleProcessor`` is generated.
+
+The current {sp.version} is 0.67.0
+
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.apache.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-jvm  \
+  -DarchetypeVersion={sp.version}
+```
+<details class="info">
+    <summary>Select: [Processors / Sinks] [JVM / Flink]</summary>
+
+## Processors JVM
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.apache.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-jvm  \
+  -DarchetypeVersion={sp.version}
+```
+
+## Processors Flink
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.apache.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-flink  \
+  -DarchetypeVersion={sp.version}
+```
+
+## Sinks JVM
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.apache.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-sinks-jvm  \
+  -DarchetypeVersion={sp.version}
+```
+
+## Sinks Flink
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.apache.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-sinks-flink  \
+  -DarchetypeVersion={sp.version}
+```
+</details>
+
+
+## Edit Processor
+Open the project in your IDE.
+If everything worked, the structure should look similar to the following image.
+The *config* package contains all the configuration parameters of your processors / sinks.
+The *main* package defines which processors / sinks you want to activate, and the *pe.processor.example* package contains three classes with the application logic.
+For details, have a look at the other parts of the Developer Guide, where these classes are explained in more depth.
+
+<img src="/docs/img/archetype/project_structure.png" width="30%" alt="Project Structure">
+
+Open the class *Example* and edit the ``onEvent`` method to print the incoming event to the console and send it to the next component without changing it.
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector collector) {
+    // Print the incoming event on the console
+    System.out.println(event);
+
+    // Hand the incoming event to the output collector without changing it.
+    collector.collect(event);
+}
+```
+
+## Start Processor
+Before the processor can be started, you need to edit the *env* file in the *development* folder.
+Replace all `localhost` entries in this file with the IP address or DNS name of your computer.
+This is required to map between the services running in Docker and your component running in the local IDE.
+Once the file is updated, it is used by the envfile plugin to provide configuration parameters to the pipeline element.
+Alternatively, environment variables can also be set on your host or IDE.
+Now create a run configuration for the project by clicking on **(Run -> Edit Configuration)**.
+Add a new configuration in the Configuration menu by clicking on the + sign and select **Application**.
+Name the configuration *ExampleProcessor* and select the *Init* class as the 'Main class'.
+Then set *ExampleProcessor* in 'Use classpath of module'.
+
+
+As the last step, switch to the tab *EnvFile* and load the env file.
+Click on 'Enable EnvFile' to activate it and add the just edited env file by clicking on the + sign.
+Save all the changes by clicking *Apply*.
+Now you can start the processor.
+
+<div class="my-carousel">
+    <img src="/docs/img/archetype/run_configuration.png" alt="Configuration View">
+    <img src="/docs/img/archetype/run_env_configuration.png" alt="Environment Configuration View">
+</div>
+
+To check if the service is up and running, open the browser on *'localhost:6666'*. The machine-readable description of the processor should be visible as shown below.
+
+<img src="/docs/img/archetype/endpoint.png" width="90%" alt="Project Structure">
+
+
+<div class="admonition error">
+<div class="admonition-title">Common Problems</div>
+<p>
+If the service description is not shown on 'localhost:6666', you might have to change the port address.
+This needs to be done in the configuration of your service, further explained in the configurations part of the developer guide.
+
+If the service does not show up in the StreamPipes installation menu, click on 'MANAGE ENDPOINTS' and add 'http://<span></span>YOUR_IP_OR_DNS_NAME:6666'.
+Use the IP or DNS name you provided in the env file.
+After adding the endpoint, a new processor with the name *Example* should show up.
+</p>
+</div>
+
+Now you can go to StreamPipes.
+Your new processor *'Example'* should now show up in the installation menu.
+Install it, then switch to the pipeline view and create a simple pipeline that makes use of your newly created processor.
+In case you opened the StreamPipes installation for the first time, it should have been automatically installed during the setup process.
+
+<img src="/docs/img/archetype/example_pipeline.png" width="80%" alt="Project Structure">
+
+Start this pipeline.
+Now you should see logging messages in your console and, once you've created a visualization, you can also see the resulting events of your component in StreamPipes.
+
+Congratulations, you have just created your first processor!
+From here on you can start experimenting and implement your own algorithms.
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-cli.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-cli.md
new file mode 100644
index 0000000..1b29a84
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-cli.md
@@ -0,0 +1,187 @@
+---
+id: version-0.67.0-extend-cli
+title: StreamPipes CLI
+sidebar_label: StreamPipes CLI
+original_id: extend-cli
+---
+
+The StreamPipes command-line interface (CLI) is focused on developers and provides an easy entrypoint to set up a suitable dev environment, whether you plan on developing
+
+* new extensions such as **connect adapters, processors, sinks** or,
+* new core features for **backend** and **ui**.
+
+## TL;DR
+
+```bash
+streampipes env --list
+[INFO] Available StreamPipes environment templates:
+pipeline-element
+...
+streampipes env --set pipeline-element
+streampipes up -d
+```
+> **NOTE**: use `./streampipes` if you haven't added it to the PATH and sourced it (see section "Run `streampipes` from anywhere?").
+
+## Prerequisites
+The CLI is basically a wrapper around multiple `docker` and `docker-compose` commands plus some additional sugar.
+
+* Docker >= 17.06.0
+* Docker-Compose >= 1.26.0 (Compose file format: 3.4)
+* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge
+* For Windows Developer: GitBash only
+
+
+Tested on: **macOS**, **Linux**, **Windows***)
+
+> **NOTE**: *) If you're using Windows, the CLI only works in combination with GitBash - CMD and PowerShell won't work.
+
+
+## CLI commands overview
+
+```
+StreamPipes CLI - Manage your StreamPipes environment with ease
+
+Usage: streampipes COMMAND [OPTIONS]
+
+Options:
+  --help, -h      show help
+  --version, -v   show version
+
+Commands:
+  clean       Remove StreamPipes data volumes, dangling images and network
+  down        Stop and remove StreamPipes containers
+  env         Inspect and select StreamPipes environments
+  info        Get information
+  logs        Get container logs for specific container
+  ps          List all StreamPipes container for running environment
+  pull        Download latest images from Dockerhub
+  restart     Restart StreamPipes environment
+  up          Create and start StreamPipes container environment
+
+Run 'streampipes COMMAND --help' for more info on a command.
+```
+
+## Usage: Along dev life-cycle
+
+**List** available environment templates.
+```bash
+streampipes env --list
+```
+
+**Inspect** services in an available environment to know what kind of services it is composed of.
+```bash
+streampipes env --inspect pipeline-element
+```
+
+**Set** environment, e.g. `pipeline-element`, if you want to write a new pipeline element.
+```bash
+streampipes env --set pipeline-element
+```
+
+**Start** environment (default: `dev` mode). Here, the service definition in the selected environment is used to start the multi-container landscape.
+> **NOTE**: `dev` mode is enabled by default since we rely on open ports to core services such as `consul`, `couchdb`, `kafka` etc. that need to be reachable from the IDE when developing. If you don't want to map ports (except the UI port), then use the `--no-ports` flag.
+
+```bash
+streampipes up -d
+# start in production mode with unmapped ports
+# streampipes up -d --no-ports
+```
+Now you're good to go to write your new pipeline element :tada: :tada: :tada:
+
+> **HINT for extensions**: Use our [Maven archetypes](https://streampipes.apache.org/docs/docs/dev-guide-archetype/) to set up a project skeleton and use your IDE of choice for development. However, we do recommend using IntelliJ.
+
+> **HINT for core**: To work on `backend` or `ui` features you need to set the template to `backend` and clone the core repository [incubator-streampipes](https://github.com/apache/incubator-streampipes) - check the prerequisites there for more information.
+
+**Stop** environment and remove docker container
+```bash
+streampipes down
+# want to also clean docker data volumes when stopping the environment?
+# streampipes down -v
+```
+
+## Additionally, useful commands
+
+**Start individual services only?** We got you! You chose a template that suits your needs and now you only want to start individual services from it, e.g. only Kafka and Consul.
+
+> **NOTE**: the service names need to be present and match your current `.spenv` environment.
+
+```bash
+streampipes up -d kafka consul
+```
+
+**Get current environment** (if previously set using `streampipes env --set <environment>`).
+```bash
+streampipes env
+```
+
+**Get logs** of specific service and use optional `--follow` flag to stay attached to the logs.
+```bash
+streampipes logs --follow backend
+```
+
+**Update** all services of current environment
+```bash
+streampipes pull
+```
+
+**Restart** all services of current environment or specific services
+```bash
+streampipes restart
+# restart backend & consul
+# streampipes restart backend consul
+```
+
+**Clean** your system and remove created StreamPipes Docker volumes, StreamPipes docker network and dangling StreamPipes images of old image layers.
+```bash
+streampipes clean
+# remove volumes, network and dangling images
+# streampipes clean --volumes
+```
+
+## Modify/Create an environment template
+As of now, this step has to be done **manually**. All environments are located in `environments/`.
+
+```bash
+├── adapter               # developing a new connect adapter
+├── backend               # developing core backend features
+├── basic                 # wanna run core, UI, connect etc from the IDE?
+├── full                  # full version containing more pipeline elements
+├── lite                  # few pipeline elements, less memory
+├── pipeline-element      # developing new pipeline-elements
+└── ui                    # developing UI features
+```
+**Modifying an existing environment template**. To modify an existing template, you can simply add a `<YOUR_NEW_SERVICE>` to the template.
+> **NOTE**: You need to make sure that the service you are adding exists in `deploy/standalone/service/<YOUR_NEW_SERVICE>`. If you're adding a completely new service, take a look at existing ones, create a new service directory and include a `docker-compose.yml` and `docker-compose.dev.yml` file.
+
+```
+[environment:backend]
+activemq
+kafka
+...
+<YOUR_NEW_SERVICE>
+```
+
+**Creating a new** environment template. To create a new environment template, place a new file `environments/<YOUR_NEW_ENVIRONMENT>` in the template directory. Open the file and use the following schema.
+> **IMPORTANT**: Please make sure to have the `[environment:<YOUR_NEW_ENVIRONMENT>]` header in the first line of your new template, matching the name of the file. Make sure to use lowercase letters only.
+
+```
+[environment:<YOUR_NEW_ENVIRONMENT>]
+<SERVICE_1>
+<SERVICE_2>
+...
+```
+
+## Run `streampipes` from anywhere? No problem
+Simply add the path to this cli directory to your `$PATH` (on macOS, Linux) variable, e.g. in your `.bashrc` or `.zshrc`, or `%PATH%` (on Windows).
+
+For **macOS**, or **Linux**:
+
+```bash
+export PATH="/path/to/incubator-streampipes-installer/cli:$PATH"
+```
+
+For **Windows 10**, e.g. check this [documentation](https://helpdeskgeek.com/windows-10/add-windows-path-environment-variable/).
+
+
+## Upgrade to new version
+To upgrade to a new version, simply edit the version tag `SP_VERSION` in the `.env` file.
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-event-model.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-event-model.md
new file mode 100644
index 0000000..5371066
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-event-model.md
@@ -0,0 +1,142 @@
+---
+id: version-0.67.0-extend-sdk-event-model
+title: SDK Guide: Event Model
+sidebar_label: SDK: Event Model
+original_id: extend-sdk-event-model
+---
+
+## Introduction
+
+The 0.61.0 release of StreamPipes introduces a new event model that replaces the ``Map`` representation of events at runtime. This guide explains the usage of the new event model to manipulate runtime events for data processors and data sinks.
+
+## Reference
+
+This guide assumes that you are already familiar with the basic setup of [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+### Property Selectors
+
+In most cases, fields that are subject to be transformed by pipeline elements are provided by the assigned ``MappingProperty`` (see the guide on [static properties](dev-guide-static-properties.md)).
+
+Mapping properties return a ``PropertySelector`` that identifies a field based on (i) the **streamIndex** and (ii) the runtime name of the field.
+Let's assume we have an event with the following structure:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "sensor1",
+    "running" : true,
+    "location" : {"latitude" : 34.4, "longitude" : -47},
+    "lastValues" : [45, 22, 21]
+}
+```
+
+In addition, we assume that a data processor exists (with one input node) that converts the temperature value (measured in degrees Celsius) to degrees Fahrenheit.
+In this case, a mapping property (selected by the pipeline developer in the StreamPipes UI) would link to the ``temperature`` field of the event.
+
+The mapping property value will be the ``PropertySelector`` of the temperature value, which looks as follows:
+
+```
+s0::temperature
+```
+
+``s0`` identifies the stream (in this case, only one input stream exists, but as data processors might require more than one input stream, a stream identifier is required), while the suffix identifies the runtime name.
+
+Note: If you add a new field to an input event, you don't need to provide a selector; you can just assign the runtime name as defined by the [output strategy](dev-guide-output-strategies.md).
+
+### Reading Fields
+
+You can get a field from an event by providing the corresponding selector:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  PrimitiveField temperatureField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsPrimitive();
+  }
+
+```
+
+Similarly, if your mapping property links to a nested property, use
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  NestedField nestedField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsNested();
+  }
+
+```
+
+and for a list-based field:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  ListField listField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsList();
+  }
+
+```
+
+### Parsing Fields
+
+#### Primitive Fields
+
+A ``PrimitiveField`` contains convenience methods to directly cast a field to the target datatype:
+
+```java
+
+// parse the value as a float datatype
+Float temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
+
+// or do the same with a double datatype
+Double temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+
+// extracting a string
+String deviceId = event.getFieldBySelector(deviceIdSelector).getAsPrimitive().getAsString();
+
+// this also works for extracting fields from nested fields:
+Double latitude = event.getFieldBySelector(latitudeSelector).getAsPrimitive().getAsDouble();
+
+// extracting boolean values
+Boolean running = event.getFieldBySelector(runningSelector).getAsPrimitive().getAsBoolean();
+```
+
+In rare cases, you might want to receive a field directly based on the runtime name as follows:
+
+```java
+Double temperature = event.getFieldByRuntimeName("temperature").getAsPrimitive().getAsDouble();
+```
+
+#### List Fields
+
+Lists can also be retrieved by providing the corresponding selector and can automatically be parsed to a list of primitive datatypes:
+
+```java
+
+List<Integer> lastValues = event.getFieldBySelector(lastValueSelector).getAsList().parseAsSimpleType(Integer.class);
+
+```
+
+(coming soon: parsing complex lists)
+
+
+### Adding/Updating Fields
+
+Primitive fields can easily be added to an event by providing the runtime name and the object:
+
+```java
+
+    // add a primitive field with runtime name "city" and value "Karlsruhe"
+    event.addField("city", "Karlsruhe");
+
+    // remove the field "temperature" from the event
+    event.removeFieldBySelector(temperatureSelector);
+
+    // add a new field
+    event.addField("fahrenheit", 48);
+```
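+
+Putting these pieces together, the Celsius-to-Fahrenheit processor from the introduction could manipulate events as follows (a minimal sketch; ``temperatureSelector`` is assumed to hold the mapping property selector extracted from the invocation):
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector out) {
+    // read the temperature (degrees Celsius) via the mapping property selector
+    Double celsius = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+
+    // append the converted value as a new field and forward the event
+    event.addField("fahrenheit", celsius * 1.8 + 32);
+    out.collect(event);
+}
+```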
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-output-strategies.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-output-strategies.md
new file mode 100644
index 0000000..95f3d87
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-output-strategies.md
@@ -0,0 +1,347 @@
+---
+id: version-0.67.0-extend-sdk-output-strategies
+title: SDK Guide: Output Strategies
+sidebar_label: SDK: Output Strategies
+original_id: extend-sdk-output-strategies
+---
+
+## Introduction
+In StreamPipes, output strategies determine the output of a data processor.
+As the exact input schema of a processor is usually not yet known at development time (as processors can be connected with any stream that matches their requirements), output strategies are a concept to define how an input data stream is transformed to an output data stream.
+
+The following reference describes how output strategies can be defined using the SDK.
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://www.github.com/apache/incubator-streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/outputstrategy/">Github</a>.</p>
+</div>
+
+## Reference
+
+The methods described below to create output strategies are available in the ``ProcessingElementBuilder`` class and are usually used in the ``declareModel`` method of the controller class.
+
+In the following, we will use this example event to explain how output strategies define the output of a data processor:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1"
+
+}
+```
+
+### Keep Output
+
+A ``KeepOutputStrategy`` declares that the output event schema will be equal to the input event schema.
+In other terms, the processor does not change the schema, but might change the values of event properties.
+
+A keep output strategy can be defined as follows:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".keep", "Keep output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.anyProperty())
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // declaring a keep output strategy
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+
+```
+
+According to the example above, the expected output event schema of the example input event would be:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1"
+
+}
+```
+
+Data processors that perform filter operations (e.g., filtering temperature values that are above a given threshold) are a common example for using keep output strategies.
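+
+As an illustration, a threshold filter using a keep output strategy might forward unchanged events in its ``onEvent`` method like this (a sketch; ``temperatureSelector`` and ``threshold`` are assumed to come from the processor's configuration):
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector out) {
+    Double temperature = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+    // forward the event unchanged only if it exceeds the threshold; the schema is never modified
+    if (temperature > threshold) {
+        out.collect(event);
+    }
+}
+```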
+
+
+### Fixed Output
+
+A ``FixedOutputStrategy`` declares that the data processor itself provides the event schema. The output schema does not depend on the input event.
+
+Fixed output strategies need to provide the event schema they produce at development time:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".fixed", "Fixed output example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.anyProperty())
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // the fixed output strategy provides the schema
+            .outputStrategy(OutputStrategies.fixed(EpProperties.timestampProperty("timestamp"),
+                    EpProperties.doubleEp(Labels.from("avg", "Average value", ""), "avg", SO.Number)))
+
+            .build();
+  }
+
+```
+
+In this example, we declare that the output schema always consists of two fields (``timestamp`` and ``avg``).
+
+Therefore, an output event should look like:
+
+```json
+{
+    "timestamp" : 1234556,
+    "avg" : 36.0
+}
+```
+
+
+### Append Output
+
+An ``AppendOutputStrategy`` appends additional fields to a schema of an incoming event stream. For instance, data processors that perform enrichment operations usually make use of append output strategies.
+
+Similar to the fixed output strategy, the additional fields must be provided at development time in the controller method as follows:
+
+```java
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".append", "Append output example", "")
+
+            // boilerplate code not relevant here, see above
+
+            // declaring an append output
+            .outputStrategy(OutputStrategies.append(EpProperties.integerEp(Labels.from("avg",
+                    "The average value", ""), "avg", SO.Number)))
+
+            .build();
+  }
+```
+
+In this case, the output event would have an additional field ``avg``:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1",
+    "avg" : 123.0
+
+}
+```
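+
+At runtime, the processor itself is responsible for adding the declared field to each event before forwarding it, e.g. in ``onEvent`` (a sketch; ``avgValue`` stands for a value computed by the processor):
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector out) {
+    // append the field declared in the append output strategy
+    event.addField("avg", avgValue);
+    out.collect(event);
+}
+```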
+
+### Custom Output
+
+In some cases, pipeline developers using the StreamPipes UI should be able to manually select fields from an input event schema. For such use cases, a ``CustomOutputStrategy`` can be used:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".custom", "Custom output example", "")
+
+            // boilerplate code not relevant here, see above
+
+            // declaring a custom output
+            .outputStrategy(OutputStrategies.custom())
+
+            .build();
+  }
+
+```
+
+If a data processor defines a custom output strategy, the pipeline editor will show a customization dialog that lets users select the fields to keep:
+
+<img src="/docs/img/dev-guide-output-strategies/os-custom.png" width="80%" alt="Number Parameter">
+
+Taking our example, and assuming that the user selects both the ``timestamp`` and the ``temperature`` the expected output event should look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0
+}
+```
+
+How do we know which fields were selected once the data processor is invoked? Use the proper method from the extractor in the ``onInvocation`` method:
+
+```java
+@Override
+  public ConfiguredEventProcessor<DummyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) {
+
+    List<String> outputSelectors = extractor.outputKeySelectors();
+
+    return new ConfiguredEventProcessor<>(new DummyParameters(graph), DummyEngine::new);
+  }
+```
+
+### Transform Output
+
+A ``TransformOutputStrategy`` declares that one or more fields of an incoming event stream are transformed. Transformations can be applied to the datatype of the property, the runtime name of the property, or any other schema-related declaration such as measurement units.
+
+#### Static Transform Operations
+
+Static transform operations do not depend on any user input (at pipeline development time) in order to know how to transform a field of an incoming event schema.
+
+Let's say our data processor transforms strings (that actually contain numbers) to a number datatype. In this case, we can use a static transform output strategy:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".transform", "Transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // static transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .staticDatatypeTransformation("str", Datatypes.Long)))
+
+            .build();
+  }
+
+```
+
+Note the mapping property that we use to determine which field of the input event should be transformed.
+
+The expected output event would look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : 1
+}
+```
+
+#### Dynamic Transform Operations
+
+Sometimes, the exact transform output depends on user input. Let's take a field renaming processor as an example, which lets the user rename a field from an input event schema to another field name.
+For such use cases, we can use a ``DynamicTransformOperation``:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".transform", "Transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // the text input to enter the new runtime name
+            .requiredTextparameter(Labels.from("new-runtime-name", "New Runtime Name", ""))
+
+            // static transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .dynamicRuntimeNameTransformation("str", "new-runtime-name")))
+
+            .build();
+  }
+
+```
+
+For dynamic transform operations, an additional identifier that links to another static property can be assigned and later be fetched in the ``onInvocation`` method.
+
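+As a sketch, the value of the linked text parameter could be fetched in ``onInvocation`` like this (assuming the ``new-runtime-name`` key from the example above):
+
+```java
+@Override
+public ConfiguredEventProcessor<DummyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) {
+    // fetch the runtime name entered by the pipeline developer
+    String newRuntimeName = extractor.singleValueParameter("new-runtime-name", String.class);
+
+    return new ConfiguredEventProcessor<>(new DummyParameters(graph), DummyEngine::new);
+}
+```
+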
+Assuming we want to rename the field ``temperature`` to ``temp``, the resulting output event should look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temp" : 37.0,
+    "deviceId" : 1
+}
+```
+
+### Custom Transform Output
+
+Finally, in some cases the output schema cannot be described at pipeline element development time. For these (usually rare) cases, a ``CustomTransformOutput`` strategy can be used.
+
+In this case, a callback function will be invoked in the controller class just after a user has filled in any static properties and clicks on ``Save`` in the pipeline editor.
+
+To define a custom transform output, we need to implement an interface in the controller class:
+
+```java
+public class CustomTransformOutputController extends
+        StandaloneEventProcessingDeclarer<DummyParameters> implements
+        ResolvesContainerProvidedOutputStrategy<DataProcessorInvocation, ProcessingElementParameterExtractor> {
+
+
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+
+  }
+}
+```
+
+In addition, the output strategy must be declared in the ``declareModel`` method:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".customtransform", "Custom transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // declare a custom transform output
+            .outputStrategy(OutputStrategies.customTransformation())
+
+            .build();
+  }
+
+```
+
+Once a new pipeline using this data processor is created and the configuration is saved, the ``resolveOutputStrategy`` method will be called, so that an event schema can be provided based on the given configuration. An extractor instance (see the guide on static properties) is available to extract the selected static properties and the connected event stream.
+
+```java
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+    return new EventSchema(Arrays
+            .asList(EpProperties
+                    .stringEp(Labels.from("runtime", "I was added at runtime", ""), "runtime", SO.Text)));
+  }
+```
+
+In this example, the output event schema should look like this:
+
+```json
+{
+    "runtime" : "Hello world!"
+}
+```
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-static-properties.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-static-properties.md
new file mode 100644
index 0000000..25b155b
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-static-properties.md
@@ -0,0 +1,265 @@
+---
+id: version-0.67.0-extend-sdk-static-properties
+title: SDK Guide: Static Properties
+sidebar_label: SDK: Static Properties
+original_id: extend-sdk-static-properties
+---
+
+## Introduction
+Static properties represent user-facing parameters that are provided by pipeline developers.
+Processing elements can specify required static properties, which will render different UI views in the pipeline editor.
+
+The following reference describes how static properties can be defined using the SDK.
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://github.com/apache/incubator-streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/staticproperty">Github</a>.</p>
+</div>
+
+## Reference
+
+The methods described below to create static properties are available in the ``ProcessingElementBuilder`` and ``DataSinkBuilder`` classes and are usually used in the ``declareModel`` method of the controller class.
+
+### Mapping property
+
+In StreamPipes, processing elements usually operate on fields of an event stream. For instance, a filter processor operates on a specific field from an input stream (e.g., a field measuring the temperature).
+Typically, pipeline developers should themselves select the exact field that the operation is applied to.
+As this field is not yet known at pipeline element development time (as it is defined by the pipeline developer in the pipeline editor), mapping properties serve to map a stream requirement to a specific field from the actual input event stream.
+
+### Unary mapping property
+
+A unary mapping property maps a stream requirement to an actual field of an event stream. Therefore, the ``StreamRequirementsBuilder`` provides the opportunity to directly add a mapping property along with a property requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+    create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+            Labels.from("mp-key", "My Mapping", ""),
+            PropertyScope.NONE)
+    .build())
+```
+
+This leads to a selection dialog in the pipeline element customization which provides the user with a selection of all event properties (fields) from the input stream that match the specified property requirement:
+
+<img src="/docs/img/dev-guide-static-properties/sp-mapping-unary.png" width="80%" alt="Text">
+
+At invocation time, the value can be extracted in the ``onInvocation`` method as follows:
+
+```java
+// Extract the mapping property value
+String mappingPropertySelector = extractor.mappingPropertyValue("mp-key");
+```
+
+Note that this method returns a ``PropertySelector``, which can be used by the event model to extract the actual value of this field.
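+
+For instance, in the ``onEvent`` method the selector can be handed to the event model to read the mapped field (a minimal sketch; see the SDK guide on the event model for details):
+
+```java
+// read the field selected by the pipeline developer via its property selector
+Double value = event.getFieldBySelector(mappingPropertySelector).getAsPrimitive().getAsDouble();
+```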
+
+### N-ary mapping property
+
+N-ary mapping properties work similar to unary mapping properties, but allow the mapping of one requirement to multiple event properties matching the requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+    create()
+    .requiredPropertyWithNaryMapping(EpRequirements.numberReq(),
+            Labels.from("mp-key", "My Mapping", ""),
+            PropertyScope.NONE)
+    .build())
+```
+
+This renders the following selection, where users can select more than one matching event property:
+
+<img src="/docs/img/dev-guide-static-properties/sp-mapping-nary.png" width="80%" alt="Text">
+
+The following snippet returns a list containing the property selectors of all event properties that have been selected:
+
+```java
+// Extract the mapping property value
+List<String> mappingPropertySelectors = extractor.mappingPropertyValues("mp-key");
+```
+
+### Free-Text Parameters
+
+A free-text parameter requires the pipeline developer to enter a single value - which can be a string or another primitive data type.
+The input of free-text parameters can be restricted to specific value ranges or can be linked to the value set of a connected input data stream.
+
+#### Text Parameters
+
+A text parameter lets the user enter a string value. The following code line in the controller class
+
+```java
+.requiredTextParameter(Labels.from(SP_KEY, "Example Name", "Example Description"))
+```
+
+leads to the following input dialog in the pipeline editor:
+
+<img src="/docs/img/dev-guide-static-properties/sp-text-parameter.png" width="80%" alt="Text">
+
+Users can enter any value that will be converted to a string datatype. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``
+
+```java
+String textParameter = extractor.singleValueParameter(SP_KEY, String.class);
+```
+
+#### Number parameters
+
+A number parameter lets the user enter a number value, either a floating-point number or an integer:
+
+```java
+// create an integer parameter
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"))
+
+// create a float parameter
+.requiredFloatParameter(Labels.from("float-key", "Float Parameter", "Example Description"))
+
+```
+
+leads to the following input dialog in the pipeline editor, which only accepts integer values:
+
+<img src="/docs/img/dev-guide-static-properties/sp-number-parameter.png" width="80%" alt="Number Parameter">
+
+The pipeline editor performs type validation and ensures that only numbers can be added by the user. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``
+
+```java
+// Extract the integer parameter value
+Integer integerParameter = extractor.singleValueParameter(SP_KEY, Integer.class);
+
+// Extract the float parameter value
+Float floatParameter = extractor.singleValueParameter("float-key", Float.class);
+
+```
+
+#### Numbers with value specification
+
+You can also specify the value range of a number-based free text parameter:
+
+```java
+// create an integer parameter with value range
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"), 0, 100, 1)
+
+```
+
+which renders the following input field:
+
+<img src="/docs/img/dev-guide-static-properties/sp-number-parameter-with-range.png" width="80%" alt="Number Parameter">
+
+Receive the entered value in the same way as a standard number parameter.
+
+#### Free-text parameters linked to an event property
+
+
+### Single-Value Selections
+
+Single-value selections let the user select from a pre-defined list of options.
+A single-value selection requires the user to select exactly one option.
+
+```java
+.requiredSingleValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Single-value selections will be rendered as a set of radio buttons in the pipeline editor:
+
+<img src="/docs/img/dev-guide-static-properties/sp-single-selection.png" width="80%" alt="Number Parameter">
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+String selectedSingleValue = extractor.selectedSingleValue("id", String.class);
+```
+
+<div class="admonition tip">
+<div class="admonition-title">Declaring options</div>
+<p>Sometimes, you may want to use an internal name that differs from the display name of an option.
+For that, you can use the method Options.from(Tuple2<String, String>) and the extractor method selectedSingleValueInternalName.</p>
+</div>
+
+
+
+### Multi-Value Selections
+
+Multi-value selections let the user select from a pre-defined list of options, where multiple options, or no option at all, may be selected.
+
+```java
+.requiredMultiValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Multi-value selections will be rendered as a set of checkboxes in the pipeline editor:
+
+<img src="/docs/img/dev-guide-static-properties/sp-multi-selection.png" width="80%" alt="Number Parameter">
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+List<String> selectedMultiValue = extractor.selectedMultiValues("id", String.class);
+```
+
+### Domain Concepts
+
+(coming soon...)
+
+### Collections
+
+You can also define collections based on other static properties.
+
+```java
+// create a collection parameter
+.requiredParameterAsCollection(Labels.from("collection", "Example Name", "Example " +
+        "Description"), StaticProperties.stringFreeTextProperty(Labels
+        .from("text-property","Text","")))
+```
+
+While the items of the collection can be provided in the same way as the underlying static property, the UI provides buttons to add items to and remove items from the collection.
+
+<img src="/docs/img/dev-guide-static-properties/sp-collection.png" width="80%" alt="Number Parameter">
+
+To extract the selected values from the collection, use the following method from the parameter extractor:
+
+```java
+// Extract the text parameter value
+List<String> textParameters = extractor.singleValueParameterFromCollection("collection", String.class);
+```
+
+### Runtime-resolvable selections
+
+In some cases, the options of selection parameters are not static, but depend on other values or might change at runtime. In this case, you can use runtime-resolvable selections.
+
+First, let your controller class implement ``ResolvesContainerProvidedOptions``:
+
+```java
+public class RuntimeResolvableSingleValue extends
+     StandaloneEventProcessingDeclarer<DummyParameters> implements ResolvesContainerProvidedOptions { ... }
+```
+
+Next, define the parameter in the ``declareModel`` method:
+
+```java
+// create a single value selection parameter that is resolved at runtime
+    .requiredSingleValueSelectionFromContainer(Labels.from("id", "Example Name", "Example " +
+            "Description"))
+```
+
+Finally, implement the method ``resolveOptions``, which will be called at runtime once the processor is used:
+
+```java
+  @Override
+  public List<RuntimeOptions> resolveOptions(String requestId, EventProperty linkedEventProperty) {
+    return Arrays.asList(new RuntimeOptions("I was defined at runtime", ""));
+  }
+```
+
+The UI will render a single-value parameter based on the options provided at runtime:
+
+<img src="/docs/img/dev-guide-static-properties/sp-single-selection-remote.png" width="80%" alt="Number Parameter">
+
+The parameter extraction does not differ from the extraction of static single-value parameters.
+
+<div class="admonition info">
+<div class="admonition-title">Multi-value selections</div>
+<p>Although this example shows the usage of runtime-resolvable selections using single value selections, the same also works for multi-value selections!</p>
+</div>
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-stream-requirements.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-stream-requirements.md
new file mode 100644
index 0000000..b892906
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-sdk-stream-requirements.md
@@ -0,0 +1,179 @@
+---
+id: version-0.67.0-extend-sdk-stream-requirements
+title: SDK Guide: Stream Requirements
+sidebar_label: SDK: Stream Requirements
+original_id: extend-sdk-stream-requirements
+---
+
+## Introduction
+
+Data processors and data sinks can define ``StreamRequirements``. Stream requirements allow pipeline elements to express requirements on an incoming event stream that are needed for the element to work properly.
+Once users create pipelines in the StreamPipes Pipeline Editor, these requirements are verified against the connected event stream.
+By using this feature, StreamPipes ensures that pipeline elements can only be connected if the connection is syntactically and semantically valid.
+
+This guide covers the creation of stream requirements. Before reading this section, we recommend that you make yourself familiar with the SDK guide on [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://www.github.com/apache/incubator-streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/requirements/">Github</a>.</p>
+</div>
+
+## The StreamRequirementsBuilder
+
+Stream requirements can be defined in the ``Controller`` class of the pipeline element. Start with a method body like this:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+The ``StreamRequirementsBuilder`` class provides methods to add stream requirements to a pipeline element.
+
+## Requirements on primitive fields
+
+As a very first example, let's assume we would like to create a data processor that filters numerical values that are above a given threshold.
+Consequently, any data stream that is connected to the filter processor needs to provide a numerical value.
+
+The stream requirement would be assigned as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredProperty(EpRequirements.numberReq())
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+Note the line starting with ``requiredProperty``, which requires any stream to provide a field of type ``number``.
+
+In many cases, you'll want to let the user select a specific field of the data stream from all available fields that match the specified requirement. For that, you simply use the method ``requiredPropertyWithUnaryMapping`` as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+                    Labels.from("number-mapping", "The value that should be filtered", ""), PropertyScope.NONE)
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+See also the developer guide on [static properties](dev-guide-static-properties.md) to better understand the usage of ``MappingProperties``.
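+
+At pipeline invocation time, the concrete field the user selected for such a mapping can be retrieved again via its internal id. A minimal sketch, assuming a ``ProcessingElementParameterExtractor`` is available in the invocation method (see the data processor tutorial) and reusing the ``number-mapping`` id from above:
+
+```java
+// Sketch: resolve the field the user mapped to the "number-mapping" requirement.
+// The returned selector can then be used to read the value from incoming events.
+String numberFieldSelector = extractor.mappingPropertyValue("number-mapping");
+```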
+
+Requirements on primitive fields can be specified for all common datatypes:
+
+```java
+ @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".simple", "Simple requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.numberReq()) // any number
+                    .requiredProperty(EpRequirements.doubleReq()) // any field of type double
+                    .requiredProperty(EpRequirements.booleanReq()) // any field of type boolean
+                    .requiredProperty(EpRequirements.integerReq()) // any field of type integer
+                    .requiredProperty(EpRequirements.stringReq()) // any field of type string
+
+                    .requiredProperty(EpRequirements.anyProperty()) // any field allowed (no restriction)
+                    .requiredProperty(EpRequirements.timestampReq())  // any timestamp field
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+### Specifying semantics
+
+For some algorithms, only specifying the datatype is not sufficient. Let's consider a geofencing algorithm that detects the presence of some geospatial coordinate (e.g., from a vehicle) within a given area.
+
+You could specify something like this:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+However, this would allow users to create implausible pipelines, since any stream containing a double value could be connected to our geofencing algorithm.
+To avoid such situations, you can also specify requirements based on the semantics of a field:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Latitude), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Longitude), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+Note that in this case, we make use of Schema.org's ``Latitude`` concept ([https://schema.org/latitude](https://schema.org/latitude)). StreamPipes already includes popular vocabularies for specifying semantics. You are also free to use your own vocabularies.
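+
+Using a custom vocabulary boils down to passing your own URI to ``domainPropertyReq``. A minimal sketch (the constant and URI below are hypothetical examples, not part of StreamPipes):
+
+```java
+// Hypothetical URI of a domain-specific concept from a custom vocabulary
+public static final String PLATE_NUMBER = "http://vocab.mycompany.org/plateNumber";
+
+StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(PLATE_NUMBER),
+        Labels.from("mapping-plate", "Plate Number", ""), PropertyScope.NONE)
+    .build();
+```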
+
+
+## Requirements on lists
+
+Similar to primitive requirements, you can define processors that require data streams with list fields; see the following example:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".list", "List requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Integer))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Double))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Boolean))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.String))
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+## Requirements on nested properties
+
+(coming soon, see the Javadoc for now)
+
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-setup.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-setup.md
new file mode 100644
index 0000000..3b1ae09
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-setup.md
@@ -0,0 +1,32 @@
+---
+id: version-0.67.0-extend-setup
+title: Development Setup
+sidebar_label: Development Setup
+original_id: extend-setup
+---
+
+Pipeline elements in StreamPipes are provided as standalone microservices. New pipeline elements can be easily developed using the provided Maven archetypes and can be installed in StreamPipes at runtime.
+
+In this section, we describe our recommended minimum setup for a local StreamPipes development instance that lets you develop, run and test new pipeline elements.
+
+## IDE & required dev tools
+StreamPipes does not have specific requirements on the IDE - so feel free to use the IDE of your choice.
+The only requirements in terms of development tools are that you have Java 8 and Maven installed.
+
+## StreamPipes CLI: Docker-based local StreamPipes instance
+In order to quickly test developed pipeline elements without needing to install all services required by StreamPipes, we provide a CLI tool that allows you to selectively start StreamPipes components.
+The CLI tool allows you to switch between several templates (based on docker-compose), depending on your role.
+
+The documentation on the usage of the CLI tool is available [here](extend-cli).
+
+## Starter projects
+
+Now, once you've started the development instance, you are ready to develop your very first pipeline element.
+Instead of starting from scratch, we recommend using our provided Maven archetypes:
+
+### Maven archetypes
+
+Create the Maven archetype as described in the [Getting Started](extend-archetypes) guide.
+
+### Examples
+
+We provide several examples that explain the usage of some concepts in this [Github repo](https://github.com/apache/incubator-streampipes-examples). 
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-processors.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-processors.md
new file mode 100644
index 0000000..69632a8
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-processors.md
@@ -0,0 +1,500 @@
+---
+id: version-0.67.0-extend-tutorial-data-processors
+title: Tutorial: Data Processors
+sidebar_label: Tutorial: Data Processors
+original_id: extend-tutorial-data-processors
+---
+
+In this tutorial, we will add a new data processor using the Apache Flink wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data processor and a Flink-compatible implementation.
+Once a pipeline is started that uses this data processor, the implementation is submitted to an Apache Flink cluster.
+
+## Objective
+
+We are going to create a new data processor that realizes a simple geofencing algorithm - we detect vehicles that enter a specified radius around a user-defined location.
+This pipeline element will be a generic element that works with any event stream that provides geospatial coordinates in the form of a latitude/longitude pair.
+
+The algorithm outputs every location event once the position has entered the geofence.
+
+
+<div class="admonition note">
+<div class="admonition-title">Note</div>
+<p>The implementation in this tutorial is pretty simple - our processor will fire an event every time the GPS location is inside the geofence.
+       In a real-world application, you would probably want to define a pattern that recognizes the _first_ event in which a vehicle enters the geofence.<br/>
+       This can be easily done using a CEP library, e.g., Apache Flink CEP.</p>
+</div>
+
+
+## Project setup
+
+To create new projects from scratch, several Maven archetypes exist to start developing.
+Enter the following command to create a new project based on the StreamPipes ``Processors-Flink`` archetype:
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-pe-processors-flink -DarchetypeVersion=0.67.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=geofence-tutorial -DclassNamePrefix=Geofencing -DpackageName=geofencing
+```
+
+Once you've imported the generated project, the project structure should look as follows:
+
+<img src="/docs/img/tutorial-processors/project-structure-processor.PNG" alt="Project Structure Data Processor">
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data processor for StreamPipes!
+
+## Adding data processor requirements
+
+First, we will add a new stream requirement.
+Open the class `GeofencingController` which should look as follows:
+
+```java
+package org.streampipes.tutorial.pe.processor.geofencing;
+
+import org.streampipes.tutorial.config.Config;
+
+import org.streampipes.model.DataProcessorType;
+import org.streampipes.model.graph.DataProcessorDescription;
+import org.streampipes.model.graph.DataProcessorInvocation;
+import org.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.ProcessingElementParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.Locales;
+import org.streampipes.sdk.helpers.OutputStrategies;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.sdk.utils.Assets;
+import org.streampipes.wrapper.flink.FlinkDataProcessorDeclarer;
+import org.streampipes.wrapper.flink.FlinkDataProcessorRuntime;
+
+public class GeofencingController extends
+				FlinkDataProcessorDeclarer<GeofencingParameters> {
+
+	private static final String EXAMPLE_KEY = "example-key";
+
+	@Override
+	public DataProcessorDescription declareModel() {
+		return ProcessingElementBuilder.create("org.streampipes.tutorial-geofencing")
+						.category(DataProcessorType.ENRICH)
+                        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+						.withLocales(Locales.EN)
+						.requiredStream(StreamRequirementsBuilder
+							.create()
+							.requiredProperty(EpRequirements.anyProperty())
+							.build())
+						.outputStrategy(OutputStrategies.keep())
+						.requiredTextParameter(Labels.from(EXAMPLE_KEY, "Example Text Parameter", "Example " +
+				"Text Parameter Description"))
+						.build();
+	}
+
+	@Override
+	public FlinkDataProcessorRuntime<GeofencingParameters> getRuntime(DataProcessorInvocation
+				graph, ProcessingElementParameterExtractor extractor) {
+
+		String exampleString = extractor.singleValueParameter(EXAMPLE_KEY, String.class);
+
+		GeofencingParameters params = new GeofencingParameters(graph, exampleString);
+
+		return new GeofencingProgram(params, Config.INSTANCE.getDebug());
+	}
+
+}
+
+```
+
+In this class, we need to implement two methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `getRuntime`, is used to create and deploy the parameterized Flink program once a pipeline using this element is started.
+
+Similar to data sources, the SDK provides a builder class to generate the description for data processors.
+Delete the content within the ``declareModel`` method and replace it with the following lines:
+
+```java
+return ProcessingElementBuilder.create("org.streampipes.tutorial.geofencing", "Geofencing", "A simple geofencing data processor " +
+            "using the Apache Flink wrapper")
+```
+
+This creates a new data processor with the ID, title and description assigned to the element builder.
+Next, we add some _stream requirements_ to the description. As we'd like to develop a generic pipeline element that works with any event that provides a lat/lng pair, we define two stream requirements as stated below:
+
+```java
+.requiredStream(StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+            Labels.from("latitude-field", "Latitude", "The event " +
+            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+            Labels.from("longitude-field", "Longitude", "The event " +
+                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .build())
+```
+
+The first line, `.requiredStream()` defines that we want a data processor with exactly one input stream. Adding more stream requirements would create elements with multiple input connectors in StreamPipes.
+Stream requirements can be assigned by using the `StreamRequirementsBuilder` class.
+In our example, we define two requirements, so-called _domain property requirements_. In contrast to _data type requirements_ where we'd expect an event property with a field of a specific data type (e.g., float), domain property requirements expect a specific domain property, e.g., from a vocabulary such as the WGS84 Geo vocab.
+
+Once a pipeline is deployed, we are interested in the actual field (and its field name) that contains the latitude and longitude values.
+In some cases, there might be more than one field that satisfies a property requirement and we would like users to select the property the geofencing component should operate on.
+Therefore, our example uses the method `requiredPropertyWithUnaryMapping`, which will map a requirement to a real event property of an input stream and let the user choose the appropriate field in the StreamPipes UI when pipelines are defined.
+
+Finally, the `PropertyScope` indicates that the required property is a measurement value (in contrast to a dimension value). This allows us later to provide improved user guidance in the pipeline editor.
+
+Besides requirements, users should be able to define the center coordinate of the geofence and the size of the fence, defined as a radius around the center in meters.
+The radius can be defined by adding a required integer parameter to the description:
+
+```java
+.requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+```
+
+Similar to mapping properties, such parameters have an internalId (radius), a label and a description.
+In addition, we can assign a _value specification_ to the parameter indicating the value range we support.
+Our example supports a radius value between 0 and 1000 with a granularity of 1.
+In the StreamPipes UI, a required text parameter is rendered as a text input field; if we provide an optional value specification, a slider input is automatically generated instead.
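+
+For comparison, a parameter declared without a value specification via ``requiredTextParameter`` is rendered as a plain text input field. A short sketch (the ``geofence-name`` parameter is only an illustrative example and not used further in this tutorial):
+
+```java
+// Sketch: a free-text parameter without a value specification
+.requiredTextParameter(Labels.from("geofence-name", "Geofence Name", "A human-readable label for this geofence"))
+```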
+
+Such user-defined parameters are called _static properties_. There are many different types of static properties (see the [Processor SDK](dev-guide-sdk-guide-processors#docsNav) for an overview).
+
+One example is _DomainProperties_, which we use for defining the center of the geofence.
+Our data processor requires a lat/lng pair that marks the center of the geofence.
+However, letting users directly input latitude and longitude coordinates wouldn't be very user-friendly.
+Therefore, we can link required text parameters to _ontology concepts_. By understanding the required input, the StreamPipes UI is able to determine which user interface works best for a specific concept.
+
+Add the following line to the `declareModel` method:
+
+```java
+.requiredOntologyConcept(Labels.from("location", "Geofence Center", "Provide the coordinate of the " +
+    "geofence center"), OntologyProperties.mandatory(Geo.lat), OntologyProperties.mandatory(Geo.lng))
+
+```
+
+We've now defined that we would like to receive an instance that provides a latitude and a longitude coordinate.
+Users can input these values either manually, or they can look up _domain knowledge_, i.e., knowledge stored separately from the stream definition.
+
+Finally, we need to define technical requirements of the data processor, called _groundings_.
+StreamPipes supports various runtime data formats (e.g., JSON or Thrift) and various protocols (e.g., Kafka or JMS).
+Each component defines its supported formats and protocols separately.
+For our example, we'd like to support JSON-based messages and Kafka as input and output broker protocol, so append the following:
+
+```java
+.supportedProtocols(SupportedProtocols.kafka())
+.supportedFormats(SupportedFormats.jsonFormat())
+.build();
+```
+
+Now we need to define the output of our Geofencing pipeline element.
+As explained in the first section, the element should fire every time some geo-located entity arrives within the defined geofence.
+Therefore, the processor outputs the same schema as it receives as an input.
+Although we don't know the exact input right now as it depends on the stream users connect in StreamPipes when creating pipelines, we can define an _output strategy_ as follows:
+
+```java
+.outputStrategy(OutputStrategies.keep())
+```
+
+This defines a _KeepOutputStrategy_, i.e., the input event schema is not modified by the processor.
+There are many more output strategies you can define depending on the functionality you desire, e.g., _AppendOutput_ for defining a processor that enriches events or _CustomOutput_ in case you would like users to select the output by themselves.
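+
+As a rough sketch of how these alternatives look (not needed for this tutorial; the appended ``distance`` field is only an illustrative example):
+
+```java
+// AppendOutput (sketch): enrich each incoming event with an additional field
+.outputStrategy(OutputStrategies.append(
+    EpProperties.doubleEp(Labels.from("distance", "Distance", "Distance to the geofence center"),
+        "distance", "http://schema.org/Number")))
+
+// CustomOutput (sketch): let users select the output fields themselves in the pipeline editor
+.outputStrategy(OutputStrategies.custom())
+```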
+
+That's it! We've now defined input requirements, required user input, technical requirements concerned with data format and protocol and an output strategy.
+In the next section, you will learn how to extract these parameters once the pipeline element is invoked after a pipeline was created.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our geofencing component, the _getRuntime_ method in our class is called. The class `DataProcessorInvocation` includes a graph that contains information on the configuration parameters a user has selected in the pipeline editor and on the actual streams that are connected to the pipeline element.
+
+Before we explain in more detail how to extract these values from the processor invocation, we need to adapt a little helper class.
+Open the file ```GeofencingParameters``` and modify it as follows:
+
+```java
+public class GeofencingParameters extends EventProcessorBindingParams {
+
+  private String latitudeFieldName;
+  private String longitudeFieldName;
+
+  private Float centerLatitude;
+  private Float centerLongitude;
+
+  private Integer radius;
+
+  public GeofencingParameters(DataProcessorInvocation graph, String latitudeFieldName, String longitudeFieldName,
+                              Float centerLatitude, Float centerLongitude, Integer radius) {
+    super(graph);
+    this.latitudeFieldName = latitudeFieldName;
+    this.longitudeFieldName = longitudeFieldName;
+    this.centerLatitude = centerLatitude;
+    this.centerLongitude = centerLongitude;
+    this.radius = radius;
+  }
+
+  public String getLatitudeFieldName() {
+    return latitudeFieldName;
+  }
+
+  public String getLongitudeFieldName() {
+    return longitudeFieldName;
+  }
+
+  public Float getCenterLatitude() {
+    return centerLatitude;
+  }
+
+  public Float getCenterLongitude() {
+    return centerLongitude;
+  }
+
+  public Integer getRadius() {
+    return radius;
+  }
+}
+```
+
+This simple Pojo class will later serve to store user-defined parameters in a single object.
+
+Now we go back to the controller class and extract these values from the invocation object.
+
+The ``ProcessingElementParameterExtractor``  provides convenience methods to extract the relevant information from the `DataProcessorInvocation` object.
+
+Next, we are interested in the fields of the input event stream that contain the latitude and longitude values we would like to check against the geofence center location. These can be extracted as follows:
+
+```java
+String latitudeFieldName = extractor.mappingPropertyValue("latitude-field");
+String longitudeFieldName = extractor.mappingPropertyValue("longitude-field");
+```
+
+We use the same `internalId` we've used to define the mapping property requirements in the `declareModel` method.
+
+Next, for extracting the geofence center coordinates, we use the following statements:
+
+```java
+Float centerLatitude = extractor.supportedOntologyPropertyValue("location", Geo.lat, Float.class);
+Float centerLongitude = extractor.supportedOntologyPropertyValue("location", Geo.lng, Float.class);
+```
+
+The radius value can be extracted as follows:
+
+```java
+Integer radius = extractor.singleValueParameter("radius", Integer.class);
+```
+
+Now we can create a new instance of our previously created parameter class:
+
+```java
+GeofencingParameters params = new GeofencingParameters(dataProcessorInvocation, latitudeFieldName,
+            longitudeFieldName, centerLatitude, centerLongitude, radius);
+```
+
+Finally, return an instance of the class ```GeofencingProgram```:
+
+```java
+return new GeofencingProgram(params, true);
+```
+
+<div class="admonition tip">
+<div class="admonition-title">Info</div>
+<p>The line above uses the Flink MiniCluster to start the Flink program for debugging purposes.
+       Before you build the project and use it in a real environment, replace the line as follows, which triggers cluster execution:
+       <code>return new GeofencingProgram(params, new FlinkDeploymentConfig(Config.JAR_FILE, Config.INSTANCE.getFlinkHost(), Config.INSTANCE.getFlinkPort()));</code></p>
+</div>
+
+
+Great! That's all we need to describe a data processor for usage in StreamPipes. Your controller class should look as follows:
+
+```java
+import org.streampipes.model.graph.DataProcessorDescription;
+import org.streampipes.model.graph.DataProcessorInvocation;
+import org.streampipes.model.schema.PropertyScope;
+import org.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.ProcessingElementParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.OntologyProperties;
+import org.streampipes.sdk.helpers.OutputStrategies;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.vocabulary.Geo;
+import org.streampipes.wrapper.flink.FlinkDataProcessorDeclarer;
+import org.streampipes.wrapper.flink.FlinkDataProcessorRuntime;
+
+public class GeofencingController extends FlinkDataProcessorDeclarer<GeofencingParameters> {
+
+  @Override
+  protected FlinkDataProcessorRuntime<GeofencingParameters> getRuntime(DataProcessorInvocation dataProcessorInvocation) {
+    ProcessingElementParameterExtractor extractor = ProcessingElementParameterExtractor.from(dataProcessorInvocation);
+
+    String latitudeFieldName = extractor.mappingPropertyValue("latitude-field");
+    String longitudeFieldName = extractor.mappingPropertyValue("longitude-field");
+
+    Float centerLatitude = extractor.supportedOntologyPropertyValue("location", Geo.lat, Float.class);
+    Float centerLongitude = extractor.supportedOntologyPropertyValue("location", Geo.lng, Float.class);
+
+    Integer radius = extractor.singleValueParameter("radius", Integer.class);
+
+    GeofencingParameters params = new GeofencingParameters(dataProcessorInvocation, latitudeFieldName,
+            longitudeFieldName, centerLatitude, centerLongitude, radius);
+
+    return new GeofencingProgram(params);
+  }
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("geofencing-flink", "Geofencing", "A simple geofencing data processor " +
+            "using the Apache Flink wrapper")
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+                            Labels.from("latitude-field", "Latitude", "The event " +
+                            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+                            Labels.from("longitude-field", "Longitude", "The event " +
+                                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                    .build())
+            .requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.",
+                    0, 1000, 1)
+            .requiredOntologyConcept(Labels.from("location", "Geofence Center", "Provide the coordinate of the " +
+                    "geofence center"), OntologyProperties.mandatory(Geo.lat), OntologyProperties.mandatory(Geo.lng))
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+            .build();
+  }
+}
+
+```
+
+## Adding an implementation
+
+All we need to do now is to add an implementation, which does not differ from writing a regular Apache Flink topology.
+
+Open the class `GeofencingProcessor.java` and add the following piece of code, which realizes the Geofencing functionality:
+
+```java
+public class GeofencingProcessor implements FlatMapFunction<Event, Event> {
+
+  private String latitudeFieldName;
+  private String longitudeFieldName;
+
+  private Float centerLatitude;
+  private Float centerLongitude;
+
+  private Integer radius;
+
+  public GeofencingProcessor(String latitudeFieldName, String longitudeFieldName, Float centerLatitude, Float centerLongitude, Integer radius) {
+    this.latitudeFieldName = latitudeFieldName;
+    this.longitudeFieldName = longitudeFieldName;
+    this.centerLatitude = centerLatitude;
+    this.centerLongitude = centerLongitude;
+    this.radius = radius;
+  }
+
+  @Override
+  public void flatMap(Event in, Collector<Event> out) throws Exception {
+    Float latitude = in.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
+    Float longitude = in.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
+
+    Float distance = distFrom(latitude, longitude, centerLatitude, centerLongitude);
+
+    if (distance <= radius) {
+      out.collect(in);
+    }
+  }
+
+  public static Float distFrom(float lat1, float lng1, float lat2, float lng2) {
+    double earthRadius = 6371000;
+    double dLat = Math.toRadians(lat2-lat1);
+    double dLng = Math.toRadians(lng2-lng1);
+    double a = Math.sin(dLat/2) * Math.sin(dLat/2) +
+            Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
+                    Math.sin(dLng/2) * Math.sin(dLng/2);
+    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
+    return (float) (earthRadius * c);
+  }
+
+}
+```
+
+We won't go into details here as this isn't StreamPipes-related code, but in general the class extracts the latitude and longitude fields from the input event and calculates the distance between the geofence center and these coordinates.
+If the distance is below the given radius, the event is forwarded to the next operator.
+
+Finally, we need to connect this program to the Flink topology. StreamPipes automatically adds things like the Kafka consumer and producer, so that you only need to invoke the actual geofencing processor.
+Open the file `GeofencingProgram` and append the following line inside the `getApplicationLogic()` method:
+
+```java
+return dataStreams[0].flatMap(new GeofencingProcessor(params.getLatitudeFieldName(), params.getLongitudeFieldName(),
+    params.getCenterLatitude(), params.getCenterLongitude(), params.getRadius()));
+```
+
+## Preparing the container
+The final step is to define the deployment type of our new data processor. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data processor and automatically submits the program to the registered Apache Flink cluster.
+
+Go to the class `Init`, which extends `StandaloneModelSubmitter` and should look as follows:
+```java
+package org.streampipes.tutorial.main;
+
+import org.streampipes.container.init.DeclarersSingleton;
+import org.streampipes.container.standalone.init.StandaloneModelSubmitter;
+
+import org.streampipes.tutorial.config.Config;
+import org.streampipes.tutorial.pe.processor.geofencing.GeofencingController;
+
+public class Init extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new GeofencingController());
+
+    new Init().init(Config.INSTANCE);
+
+  }
+}
+```
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.
+</p>
+</div>
+
+## Starting the container
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element containers as defined in the `Config` file is port 8090.
+       If you'd like to run multiple containers at the same time on your development machine, change the port in the environment file.
+</p>
+</div>
+
+Now we are ready to start our container!
+
+Execute the main method in the class `Init` we've just created, open a web browser and navigate to http://localhost:8090 (or the port you have assigned in the environment file).
+
+You should see something as follows:
+
+<img src="/docs/img/tutorial-processors/pe-overview-flink.PNG" alt="Pipeline Element Container Overview (Flink)">
+
+Click on the link of the data processor to see the RDF description of the pipeline element.
+
+<img src="/docs/img/tutorial-processors/pe-rdf-geofencing.PNG" alt="Geofencing RDF description">
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the newly created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-installation).
+
+## Read more
+
+Congratulations! You've just created your first data processor for StreamPipes.
+There are many more things to explore and data processors can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](dev-guide-sdk-guide-processors) to see what's possible!
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sinks.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sinks.md
new file mode 100644
index 0000000..21eaab8
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sinks.md
@@ -0,0 +1,247 @@
+---
+id: version-0.67.0-extend-tutorial-data-sinks
+title: Tutorial: Data Sinks
+sidebar_label: Tutorial: Data Sinks
+original_id: extend-tutorial-data-sinks
+---
+
+In this tutorial, we will add a new data sink using the standalone wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data sink and a corresponding implementation.
+
+## Objective
+
+We are going to create a new data sink that calls an external HTTP endpoint to forward data to an external service.
+
+For each incoming event, an external service is invoked using an HTTP POST request. In this example, we'll call an endpoint provided by [RequestBin](https://requestbin.com/).
+To set up your own endpoint, go to [https://requestbin.com/](https://requestbin.com/) and click "Create a request bin". Copy the URL of the newly created endpoint.
+
+
+## Project setup
+
+We'll create a new project using the provided sinks-standalone-jvm Maven archetype.
+Enter the following command to create a new project based on the StreamPipes ``Sinks-JVM`` archetype:
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-pe-sinks-jvm -DarchetypeVersion=0.67.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=sink-tutorial -DclassNamePrefix=Rest -DpackageName=rest
+```
+
+Once you've imported the generated project, the project structure should look as follows:
+
+<img src="/docs/img/tutorial-sinks/project-structure-sinks.png" alt="Project Structure Data Sink">
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data sink for StreamPipes!
+
+## Adding data sink requirements
+
+First, we will add a new stream requirement.
+Open the class `RestController` which should look as follows:
+
+```java
+package org.streampipes.tutorial.pe.sink.rest;
+
+import org.streampipes.model.DataSinkType;
+import org.streampipes.model.graph.DataSinkDescription;
+import org.streampipes.model.graph.DataSinkInvocation;
+import org.streampipes.model.schema.PropertyScope;
+import org.streampipes.sdk.builder.DataSinkBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.DataSinkParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.wrapper.standalone.ConfiguredEventSink;
+import org.streampipes.wrapper.standalone.declarer.StandaloneEventSinkDeclarer;
+import org.streampipes.sdk.helpers.*;
+import org.streampipes.sdk.utils.Assets;
+
+public class RestController extends StandaloneEventSinkDeclarer<RestParameters> {
+
+	private static final String EXAMPLE_KEY = "example-key";
+
+	@Override
+	public DataSinkDescription declareModel() {
+		return DataSinkBuilder.create("org.streampipes.tutorial.pe.sink.rest")
+						.category(DataSinkType.NOTIFICATION)
+						.withAssets(Assets.DOCUMENTATION, Assets.ICON)
+						.withLocales(Locales.EN)
+						.requiredStream(StreamRequirementsBuilder
+                                            .create()
+                                            .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
+                                                    "fields-to-send"), PropertyScope.NONE)
+                                            .build())
+						.build();
+	}
+
+	@Override
+	public ConfiguredEventSink<RestParameters> onInvocation(DataSinkInvocation graph, DataSinkParameterExtractor extractor) {
+
+		 List<String> fieldsToSend = extractor.mappingPropertyValues("fields-to-send");
+        
+         RestParameters params = new RestParameters(graph, fieldsToSend);
+
+		return new ConfiguredEventSink<>(params, Rest::new);
+	}
+
+}
+
+```
+
+In this class, we need to implement two methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `onInvocation`, is used to create and configure the sink instance once a pipeline using this sink is started.
+
+The ``declareModel`` method describes the properties of our data sink:
+* ``category`` defines a category for this sink.
+* ``withAssets`` denotes that we will provide an external documentation file and an icon, which can be found in the ``resources`` folder
+* ``withLocales`` defines that we will provide an external language file, also available in the ``resources`` folder
+* ``requiredStream`` defines requirements any input stream connected to this sink must provide. In this case, we do not have any specific requirements; we just forward all incoming events to the REST sink. However, we want to show the user a list of all fields available in the connected input event, from which a subset can be selected. This is achieved by defining a mapping from the empty requirement, which will later render a selection dialog in the pipeline editor.
+
+The ``onInvocation`` method is called when a pipeline containing the sink is started. Once a pipeline is started, we would like to extract user-defined parameters.
+In this example, we simply extract the fields selected by users that should be forwarded to the REST sink. Finally, we return a new configured event sink containing the parameters.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our data sink, the _onInvocation_ method in our class is called. The class `DataSinkInvocation` includes a graph that contains information on the configuration parameters a user has selected in the pipeline editor and on the actual streams that are connected to the pipeline element.
+
+Before we explain in more detail how to extract these values from the sink invocation, we need to adapt a little helper class.
+Open the file ```RestParameters``` and modify it as follows:
+
+```java
+public class RestParameters extends EventSinkBindingParams {
+
+  private List<String> fieldsToSend;
+
+  public RestParameters(DataSinkInvocation graph, List<String> fieldsToSend) {
+    super(graph);
+    this.fieldsToSend = fieldsToSend;
+  }
+
+  public List<String> getFieldsToSend() {
+    return fieldsToSend;
+  }
+}
+```
+
+This file will later provide information on the configured pipeline element.
+
+## Adding an implementation
+
+Now open the class ``Rest`` to add the proper implementation (i.e., the Rest call executed for every incoming event).
+
+Our final class should look as follows:
+
+```java
+private static Logger LOG = LoggerFactory.getLogger(Rest.class.getCanonicalName());
+
+  private static final String REST_ENDPOINT_URI = YOUR_REQUEST_BIN_URL;
+  private List<String> fieldsToSend;
+  private SpDataFormatDefinition dataFormatDefinition;
+
+  public Rest() {
+    this.dataFormatDefinition = new JsonDataFormatDefinition();
+  }
+
+  @Override
+  public void onInvocation(RestParameters parameters, EventSinkRuntimeContext runtimeContext) {
+    this.fieldsToSend = parameters.getFieldsToSend();
+  }
+
+  @Override
+  public void onEvent(Event event) {
+    Map<String, Object> outEventMap = event.getSubset(fieldsToSend).getRaw();
+    try {
+      String json = new String(dataFormatDefinition.fromMap(outEventMap));
+      Request.Post(REST_ENDPOINT_URI).body(new StringEntity(json, Charsets.UTF_8)).execute();
+    } catch (SpRuntimeException e) {
+      LOG.error("Could not parse incoming event");
+    } catch (IOException e) {
+      LOG.error("Could not reach endpoint at {}", REST_ENDPOINT_URI);
+    }
+  }
+
+  @Override
+  public void onDetach() {
+
+  }
+```
+The only class variable you need to change right now is REST_ENDPOINT_URI. Change its value to the URL provided by your request bin.
+We'll ignore the other class variables and the constructor for now. The important parts are the three methods ``onInvocation``, ``onEvent`` and ``onDetach``.
+
+The ``onInvocation`` method is called once a pipeline containing our REST data sink is started. The ``onEvent`` method is called for each incoming event. Finally, ``onDetach`` is called when a pipeline is stopped.
+
+In the ``onInvocation`` method, we can extract the selected fields to be forwarded to the REST endpoint.
+In the ``onEvent`` method, we use a helper method to get a subset of the incoming event.
+Finally, we convert the resulting ``Map`` to a JSON string and call the endpoint.
+
+
+## Preparing the container
+The final step is to define the deployment type of our new data sink. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data sink and automatically starts the program in an embedded container.
+
+Go to the class `Init`, which extends `StandaloneModelSubmitter` and should look as follows:
+```java
+
+public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new RestController());
+
+    DeclarersSingleton.getInstance().setPort(Config.INSTANCE.getPort());
+    DeclarersSingleton.getInstance().setHostName(Config.INSTANCE.getHost());
+
+    DeclarersSingleton.getInstance().registerDataFormats(new JsonDataFormatFactory(),
+            new CborDataFormatFactory(),
+            new SmileDataFormatFactory(),
+            new FstDataFormatFactory());
+
+    DeclarersSingleton.getInstance().registerProtocols(new SpKafkaProtocolFactory(),
+            new SpJmsProtocolFactory());
+
+    new Init().init(Config.INSTANCE);
+  }
+```
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.
+</p>
+</div>
+
+## Starting the container
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element containers as defined in the `Config` file is port 8090.
+       If you'd like to run multiple containers at the same time on your development machine, change the port in the environment file.
+</p>
+</div>
+
+Now we are ready to start our container!
+
+Execute the main method in the class `Init` we've just created.
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the newly created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-installation).
+
+## Read more
+
+Congratulations! You've just created your first data sink for StreamPipes.
+There are many more things to explore and data sinks can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](dev-guide-sdk-guide-sinks) to see what's possible!
diff --git a/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sources.md b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sources.md
new file mode 100644
index 0000000..a7ce1f7
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/06_extend-tutorial-data-sources.md
@@ -0,0 +1,283 @@
+---
+id: version-0.67.0-extend-tutorial-data-sources
+title: Tutorial: Data Sources
+sidebar_label: Tutorial: Data Sources
+original_id: extend-tutorial-data-sources
+---
+
+In this tutorial, we will add a new data source consisting of a single data stream. The source will be provided as a standalone component (i.e., the description will be accessible through an integrated web server).
+
+## Objective
+
+We are going to create a new data stream that is produced by a GPS sensor installed in a delivery vehicle.
+The sensor produces a continuous stream of events that contain the current timestamp, the current lat/lng position of the vehicle and the plate number of the vehicle.
+Events are published in a JSON format as follows:
+```json
+{
+  "timestamp" : 145838399,
+  "latitude" : 37.04,
+  "longitude" : 17.04,
+  "plateNumber" : "KA-AB 123"
+}
+```
+
+These events are published to a Kafka broker using the topic `org.streampipes.tutorial.vehicle`.
+
+In the following section, we show how to describe this stream in a form that allows you to import and use it in StreamPipes.
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton.
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate \
+-DarchetypeGroupId=org.streampipes -DarchetypeArtifactId=streampipes-archetype-pe-sources \
+-DarchetypeVersion=0.67.0 -DgroupId=my.groupId \
+-DartifactId=my-source -DclassNamePrefix=MySource -DpackageName=mypackagename
+```
+
+Configure the variables ``artifactId`` (which will be the Maven artifactId), ``classNamePrefix`` (which will be the class name of your data stream) and ``packageName``.
+
+For this tutorial, use ``Vehicle`` as ``classNamePrefix``.
+
+Your project will look as follows:
+
+<img src="/docs/img/tutorial-sources/project-structure.PNG" alt="Project Structure">
+
+That's it, go to the next section to learn how to create your first data stream!
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
+</p>
+</div>
+
+## Adding a data stream description
+
+Now we will add a new data stream definition.
+First, open the generated stream class (e.g., `VehicleStream`; the generic skeleton below uses the `MySource` prefix), which should look as follows:
+
+```java
+
+package my.groupId.pe.mypackagename;
+
+import org.streampipes.model.SpDataStream;
+import org.streampipes.model.graph.DataSourceDescription;
+import org.streampipes.sdk.builder.DataStreamBuilder;
+import org.streampipes.sdk.helpers.EpProperties;
+import org.streampipes.sdk.helpers.Formats;
+import org.streampipes.sdk.helpers.Protocols;
+import org.streampipes.sources.AbstractAdapterIncludedStream;
+
+
+public class MySourceStream extends AbstractAdapterIncludedStream {
+
+  @Override
+  public SpDataStream declareModel(DataSourceDescription sep) {
+    return DataStreamBuilder.create("my.groupId-mypackagename", "MySource", "")
+            .property(EpProperties.timestampProperty("timestamp"))
+
+            // configure your stream here
+
+            .format(Formats.jsonFormat())
+            .protocol(Protocols.kafka("localhost", 9092, "TOPIC_SHOULD_BE_CHANGED"))
+            .build();
+  }
+
+  @Override
+  public void executeStream() {
+
+  }
+}
+```
+
+This class extends the class ``AbstractAdapterIncludedStream``, which indicates that this source continuously produces data (configured in the ``executeStream()`` method).
+In contrast, the class `AbstractAlreadyExistingStream` indicates that we only want to describe an already existing stream (e.g., a stream that already sends data to an existing Kafka broker).
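+
+As a sketch, describing an already existing stream only requires the ``declareModel`` method; we assume here that ``AbstractAlreadyExistingStream`` lives in the same ``org.streampipes.sources`` package as the adapter-included variant and that no ``executeStream()`` simulation is needed:
+
+```java
+public class MySourceStream extends AbstractAlreadyExistingStream {
+
+  @Override
+  public SpDataStream declareModel(DataSourceDescription sep) {
+    // Only the description is provided here; the events are already produced
+    // by an external system and sent to the configured broker and topic.
+    return DataStreamBuilder.create("my.groupId-mypackagename", "MySource", "")
+            .property(EpProperties.timestampProperty("timestamp"))
+            .format(Formats.jsonFormat())
+            .protocol(Protocols.kafka("localhost", 9092, "existing-topic"))
+            .build();
+  }
+}
+```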
+
+Next, we will add the definition of the data stream. Add the following code inside of the `declareModel` method:
+```java
+return DataStreamBuilder.create("org.streampipes.tutorial.vehicle.position", "Vehicle Position", "An event stream " +
+          "that produces current vehicle positions")
+```
+
+This line creates a new instance of the SDK's `DataStreamBuilder` by providing three basic parameters:
+The first parameter must be a unique identifier of your data stream.
+The second and third parameters indicate a label and a description of your stream.
+These values will later be used in the StreamPipes UI to display stream details in a human-readable manner.
+
+Next, we will add the properties as stated above to the stream definition by adding the following lines:
+```java
+.property(EpProperties.timestampProperty("timestamp"))
+.property(EpProperties.stringEp(Labels.from("plate-number", "Plate Number", "Denotes the plate number of the vehicle"), "plateNumber", "http://my.company/plateNumber"))
+.property(EpProperties.doubleEp(Labels.from("latitude", "Latitude", "Denotes the latitude value of the vehicle's position"), "latitude", Geo.lat))
+.property(EpProperties.doubleEp(Labels.from("longitude", "Longitude", "Denotes the longitude value of the vehicle's position"), "longitude", Geo.lng))
+```
+These four _event properties_ compose our _event schema_. An event property must, at least, provide the following attributes:
+
+* **Runtime Name**. The runtime name indicates the key of the property at runtime, e.g., if our JSON message contains a structure such as `{"plateNumber" : "KA-F 123"}`, the runtime name must be `plateNumber`.
+* **Runtime Type**. An event property must have a primitive type (we will later see how to model more complex properties such as lists and nested properties).
+  The type must be one of the `XMLSchema` primitives; however, the SDK provides convenience methods to assign the property type.
+* **Domain Property**. The domain property indicates the semantics of the event property. For instance, the `latitude` property is linked to the `http://www.w3.org/2003/01/geo/wgs84_pos#lat` property of the WGS84 vocabulary.
+  The domain property should be a URI that is part of an existing or domain-specific vocabulary. The SDK provides convenience methods for popular vocabularies (e.g., Schema.org, Dolce or WGS84).
+
+In order to complete the minimum required specification of an event stream, we need to provide information on the transport format and protocol of the data stream at runtime.
+
+This can be achieved by extending the builder with the respective properties (which should already have been auto-generated):
+```java
+.format(Formats.jsonFormat())
+.protocol(Protocols.kafka("localhost", 9092, "TOPIC_SHOULD_BE_CHANGED"))
+.build();
+```
+
+Set ``org.streampipes.tutorial.vehicle`` as your new topic by replacing the term ``TOPIC_SHOULD_BE_CHANGED``.
+
+In this example, we defined that the data stream consists of events in a JSON format and that Kafka is used as a message broker to transmit events.
+The last build() method call triggers the construction of the RDF-based data stream definition.
+
+That's it! In the next section, we will connect the data stream to a source and inspect the generated RDF description.
+
+## Creating some dummy data
+
+Let's assume our stream should produce some random values that are sent to StreamPipes. We'll add a very simple data simulator to the ``executeStream`` method as follows:
+
+```java
+@Override
+  public void executeStream() {
+
+    SpKafkaProducer producer = new SpKafkaProducer("localhost:9092", "TOPIC_SHOULD_BE_CHANGED");
+    Random random = new Random();
+    Runnable runnable = new Runnable() {
+      @Override
+      public void run() {
+        for (;;) {
+          JsonObject jsonObject = new JsonObject();
+          jsonObject.addProperty("timestamp", System.currentTimeMillis());
+          jsonObject.addProperty("plateNumber", "KA-FZ 1");
+          jsonObject.addProperty("latitude", random.nextDouble());
+          jsonObject.addProperty("longitude", random.nextDouble());
+
+          producer.publish(jsonObject.toString());
+
+          try {
+            Thread.sleep(1000);
+          } catch (InterruptedException e) {
+            e.printStackTrace();
+          }
+
+        }
+      }
+    };
+
+    new Thread(runnable).start();
+  }
+```
+
+Change the topic and the URL of your Kafka broker (as stated in the controller).
+
+## Adding a source description
+
+A data source can be seen as a container for a set of data streams. Usually, a data source includes events that are logically or physically connected.
+For instance, in our example we would add other streams produced by vehicle sensors (such as fuel consumption) to the same data source description.
+
+Open the class `DataSource` which should look as follows:
+```java
+
+package my.groupId.pe.mypackagename;
+
+import org.streampipes.container.declarer.DataStreamDeclarer;
+import org.streampipes.container.declarer.SemanticEventProducerDeclarer;
+import org.streampipes.model.graph.DataSourceDescription;
+import org.streampipes.sdk.builder.DataSourceBuilder;
+
+import java.util.Arrays;
+import java.util.List;
+
+
+public class DataSource implements SemanticEventProducerDeclarer {
+
+  public DataSourceDescription declareModel() {
+    return DataSourceBuilder.create("my.groupId.mypackagename.source", "MySource " +
+        "Source", "")
+            .build();
+  }
+
+  public List<DataStreamDeclarer> getEventStreams() {
+    return Arrays.asList(new MySourceStream());
+  }
+}
+```
+First, we need to define the source. Similar to data streams, a source consists of an id, a human-readable name and a description.
+Replace the content defined in the `declareModel` method with the following code:
+```java
+return DataSourceBuilder.create("org.streampipes.tutorial.source.vehicle", "Vehicle Source", "A data source that " +
+    "holds event streams produced by vehicles.")
+    .build();
+```
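+
+If we later wanted to register further streams of this source (e.g., the fuel consumption stream mentioned above), we would only need to extend the ``getEventStreams`` method. A sketch (``FuelConsumptionStream`` is a hypothetical class, not part of this tutorial):
+
+```java
+public List<DataStreamDeclarer> getEventStreams() {
+  // All streams returned here are attached to the same data source description
+  return Arrays.asList(new MySourceStream(), new FuelConsumptionStream());
+}
+```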
+
+## Preparing the container
+
+The final step is to define the deployment type of our new data source. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data source.
+
+Go to the class `Init` that extends `StandaloneModelSubmitter`, which should look as follows:
+```java
+package my.groupId.main;
+
+import org.streampipes.container.init.DeclarersSingleton;
+import org.streampipes.container.standalone.init.StandaloneModelSubmitter;
+import my.groupId.config.Config;
+import my.groupId.pe.mypackagename.DataSource;
+
+public class Init extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new DataSource());
+
+    new Init().init(Config.INSTANCE);
+
+  }
+}
+```
+This code adds the data source we have just created. Finally, the `init` method is called,
+which triggers the generation of the corresponding RDF description and the startup of the web server.
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.</p>
+</div>
+
+## Starting the container
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+Now we are ready to start our first container!
+
+Execute the main method in the class `Init` we've just created, open a web browser and navigate to http://localhost:8090, or change the port according to the value of the ``SP_PORT`` variable in the env file.
+
+You should see something as follows:
+
+<img src="/docs/img/tutorial-sources/pe-overview.PNG" alt="Pipeline Element Container Overview">
+
+Click on the link of the data source to see the RDF description of the pipeline element.
+
+<img src="/docs/img/tutorial-sources/pe-rdf.PNG" alt="Pipeline Element RDF description">
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the newly created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-introduction).
+
+## Read more
+
+Congratulations! You've just created your first pipeline element for StreamPipes.
+There are many more things to explore and data sources can be defined in much more detail.
+Follow our [SDK guide](dev-guide-source-sdk) to see what's possible!
diff --git a/documentation/website/versioned_docs/version-0.67.0/07_technicals-architecture.md b/documentation/website/versioned_docs/version-0.67.0/07_technicals-architecture.md
new file mode 100644
index 0000000..1e63386
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/07_technicals-architecture.md
@@ -0,0 +1,63 @@
+---
+id: version-0.67.0-technicals-architecture
+title: Architecture
+sidebar_label: Architecture
+original_id: technicals-architecture
+---
+
+
+The following picture illustrates the high-level architecture of StreamPipes:
+
+<img src="/docs/img/architecture/high-level-architecture.png" alt="High Level Architecture of StreamPipes">
+
+Users mainly interact with the _Pipeline Editor_ (besides other UI components) to create stream processing pipelines based on data streams, data processors and data sinks.
+These reusable pipeline elements are provided by self-contained _pipeline element containers_, each of them having a semantic description that specifies their characteristics (e.g., input, output and required user input for data processors).
+Each pipeline element container has a REST endpoint that provides these characteristics as a JSON-LD document.
+
+Pipeline element containers are built using one of several provided _wrappers_.
+Wrappers abstract from the underlying runtime stream processing framework.
+Currently, the StreamPipes framework provides wrappers for Apache Flink, Esper and algorithms running directly on the JVM.
+
+The _pipeline manager_ manages the definition and execution of pipelines.
+When creating pipelines, the manager continuously matches the pipeline against its semantic description and provides user guidance in the form of recommendations.
+Once a pipeline is started, the pipeline manager invokes the corresponding pipeline element containers.
+The container prepares the actual execution logic and submits the program to the underlying execution engine, e.g., the program is deployed in the Apache Flink cluster.
+
+Pipeline elements exchange data using one or more message brokers and protocols (e.g., Kafka or MQTT).
+StreamPipes does not rely on a specific broker or message format, but negotiates suitable brokers based on the capabilities of connected pipeline elements.
+
+Thus, StreamPipes provides a higher-level abstraction on top of existing stream processing technology, enabling domain experts to create streaming analytics pipelines in a self-service manner.
+
+## Semantic description
+Pipeline elements in StreamPipes are meant to be reusable:
+
+* Data processors and data sinks are generic (or domain-specific) elements that express their requirements and are able to operate on any stream that satisfies these requirements.
+* Data processors and data sinks can be manually configured by offering possible configuration parameters which users can individually define when creating pipelines.
+* Data streams can be connected to any data processor or data sink that matches the capabilities of the stream.
+
+When users create pipelines by connecting a data stream with a data processor (or further processors), the pipeline manager _matches_ the input stream of a data processor against its requirements.
+This matching is performed based on the _semantic description_ of each element.
+The semantic description (technically an RDF graph serialized as JSON-LD) can be best understood by seeing it as an envelope around a pipeline element.
+It only provides metadata information, while we don't rely on any RDF at runtime for exchanging events between pipeline elements.
+While RDF-based metadata ensures good understanding of stream capabilities, lightweight event formats at runtime (such as JSON or Thrift) ensure fast processing of events.
+
+Let's look at an example stream that produces a continuous stream of vehicle positions as illustrated below:
+
+<img src="/docs/img/architecture/semantic-description-stream.png" alt="Semantic description of data streams">
+
+While the runtime layer produces plain JSON by submitting actual values of the position and the vehicle's plate number, the description layer describes various characteristics of the stream:
+For instance, it defines the event schema (which, besides the data type and the runtime name of each property, also includes a more fine-grained meaning of the property), quality aspects (e.g., the measurement unit of a property or the frequency) and the grounding (e.g., the format used at runtime and the communication protocol used for transmitting events).
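+
+For illustration, a single runtime event of this stream might look like the following JSON document (field names and values are purely illustrative and depend on the actual adapter):
+
+```json
+{
+  "timestamp": 1620121200000,
+  "latitude": 49.0069,
+  "longitude": 8.4037,
+  "plateNumber": "KA-AB 123"
+}
+```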
+
+The same accounts for data processors and data sinks:
+
+<img src="/docs/img/architecture/semantic-description-processor.png" alt="Semantic description of data processor">
+
+Data processors (and, with some differences, data sinks) are annotated by providing metadata information on their required input and output.
+For instance, we can define minimum schema requirements (such as geospatial coordinates that need to be provided by any stream that is connected to a processor), but also required (minimum or maximum) quality levels and supported transport protocols and formats.
+In addition, the semantic description provides the configuration parameters that users can define during the pipeline definition process.
+
+Once new pipeline elements are imported into StreamPipes, we store all information provided by the description layer in a central repository and use this information to guide users through the pipeline definition process.
+
+Don't worry - you will never be required to model RDF by yourself.
+Our SDK provides convenience methods that help you create the description automatically.
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/07_technicals-configuration.md b/documentation/website/versioned_docs/version-0.67.0/07_technicals-configuration.md
new file mode 100644
index 0000000..5c26f20
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/07_technicals-configuration.md
@@ -0,0 +1,59 @@
+---
+id: version-0.67.0-technicals-configuration
+title: Configuration
+sidebar_label: Configuration
+original_id: technicals-configuration
+---
+
+On this page we explain how the StreamPipes configuration works.
+StreamPipes allows the individual services (pipeline element containers and third-party services) to store configuration parameters in a distributed key-value store.
+This has the advantage that individual services do not need to store any configurations on the local file system, enabling us to run containers anywhere.
+As a key-value store, we use [Consul](https://www.consul.io/), which all StreamPipes services depend on.
+
+<img src="/docs/img/configuration/consul.png" width="50%" alt="Semantic description of data processor">
+
+
+## Edit Configurations
+All services in StreamPipes can have configuration parameters.
+You can either change them in the Consul user interface (which by default runs on port 8500) or directly on the StreamPipes configuration page.
+Once a new pipeline element container is started, it is registered in Consul and the parameters can be edited on the configuration page, as shown below.
+To store changes in Consul, the update button must be clicked.
+
+<div class="my-carousel">
+    <img src="/docs/img/configuration/configuration_1.png" alt="Configuration View">
+</div>
+
+## Configuration for Developers
+We provide a Configurations API for the use of configuration parameters in your services.
+Each processing element project has a "config" package [[Example]](https://github.com/apache/incubator-streampipes-extensions/tree/dev/streampipes-sinks-internal-jvm/src/main/java/org/streampipes/sinks/internal/jvm/config).
+This package usually contains two classes:
+one containing unique keys for the configuration values and one containing the getter and setter methods to access these values.
+For the naming of configuration keys, we recommend using "SP" as a prefix.
+As we explain later, default configurations can be set via environment variables, and this prefix makes the keys unique on your server.
+A configuration entry needs a unique config key. For this key, a value can be specified containing the configuration, for example the port number of the service.
+For each configuration, a description explaining the parameter can be provided; furthermore, the data type must be specified and whether or not the value is a password.
+Below, the schema of a configuration item is shown on the left and an example of a port configuration on the right.
+
+<img src="/docs/img/configuration/config_key.png" width="80%" alt="Semantic description of data processor">
+
+As a developer, you can add as many new configurations to services as you wish, but there are some that are required for all processing element containers.
+Those are **the host**, **the port**, and **the name** of the service.
+
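+A key class following these conventions might look like the sketch below (all names are illustrative and not taken from an actual StreamPipes module):
+
+```java
+public class ConfigKeys {
+  // The "SP" prefix keeps the keys unique when they are also used as environment variables
+  public static final String HOST = "SP_EXAMPLE_SERVICE_HOST";
+  public static final String PORT = "SP_EXAMPLE_SERVICE_PORT";
+  public static final String SERVICE_NAME = "SP_EXAMPLE_SERVICE_NAME";
+}
+```
+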
+## Default Values
+You can provide default values for the configurations, which are used when a configuration is read for the first time.
+The first option is to register a configuration parameter in the Config class.
+This is a fallback value, which is used if nothing else is defined.
+Since this value is static, we offer a second option.
+It is possible to provide a default value by setting an environment variable.
+In this case, the convention is that the key of the configuration parameter must be used as the name of the environment variable.
+This value is then used instead of the value defined in the Config class.
+During development, the configuration values often need to be changed for debugging purposes; therefore, we provide a .env file in all processing element projects and archetypes.
+This file can be used by your IDE to set the environment variables (e.g., with the [IntelliJ EnvFile plugin](https://plugins.jetbrains.com/plugin/7861-envfile)).
+When you need to change a variable at runtime, you can do this in the StreamPipes configurations as explained before.
+Those changes take effect immediately, without the need for a container restart.
+
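+The resulting lookup order (environment variable first, then the static fallback registered in the Config class) roughly corresponds to the following sketch; the Consul storage step is omitted and all names are illustrative:
+
+```java
+public class DefaultValueResolver {
+
+  // Returns the value of the environment variable if it is set,
+  // otherwise the static fallback defined in the Config class.
+  public static String resolve(String configKey, String staticDefault) {
+    String fromEnvironment = System.getenv(configKey);
+    return fromEnvironment != null ? fromEnvironment : staticDefault;
+  }
+
+  public static void main(String[] args) {
+    // e.g., SP_EXAMPLE_SERVICE_PORT=7023 set via the .env file overrides the fallback value
+    System.out.println(resolve("SP_EXAMPLE_SERVICE_PORT", "8090"));
+  }
+}
+```
+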
+<div class="admonition warning">
+<div class="admonition-title">Installed pipeline elements</div>
+<p>Be cautious: when a configuration value is used in the semantic description of a processing element that is already installed in StreamPipes, you have to reload this element in StreamPipes (My Elements -> Reload).
+   In addition, changes might affect already running pipelines.</p>
+</div>
diff --git a/documentation/website/versioned_docs/version-0.67.0/07_technicals-messaging.md b/documentation/website/versioned_docs/version-0.67.0/07_technicals-messaging.md
new file mode 100644
index 0000000..2fa58f9
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/07_technicals-messaging.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-technicals-messaging
+title: Messaging
+sidebar_label: Messaging
+original_id: technicals-messaging
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/07_technicals-runtime-wrappers.md b/documentation/website/versioned_docs/version-0.67.0/07_technicals-runtime-wrappers.md
new file mode 100644
index 0000000..a64bb8d
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/07_technicals-runtime-wrappers.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-technicals-runtime-wrappers
+title: Runtime Wrappers
+sidebar_label: Runtime Wrappers
+original_id: technicals-runtime-wrappers
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/07_technicals-user-guidance.md b/documentation/website/versioned_docs/version-0.67.0/07_technicals-user-guidance.md
new file mode 100644
index 0000000..89a515f
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/07_technicals-user-guidance.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-technicals-user-guidance
+title: User Guidance
+sidebar_label: User Guidance
+original_id: technicals-user-guidance
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/08_debugging.md b/documentation/website/versioned_docs/version-0.67.0/08_debugging.md
new file mode 100644
index 0000000..38d46a2
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/08_debugging.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-debugging-debugging
+title: Debugging
+sidebar_label: Debugging
+original_id: debugging-debugging
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/08_monitoring.md b/documentation/website/versioned_docs/version-0.67.0/08_monitoring.md
new file mode 100644
index 0000000..0f6433c
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/08_monitoring.md
@@ -0,0 +1,8 @@
+---
+id: version-0.67.0-debugging-monitoring
+title: Monitoring
+sidebar_label: Monitoring
+original_id: debugging-monitoring
+---
+
+tbd
\ No newline at end of file
diff --git a/documentation/website/versioned_docs/version-0.67.0/09_contribute.md b/documentation/website/versioned_docs/version-0.67.0/09_contribute.md
new file mode 100644
index 0000000..fa9feaf
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/09_contribute.md
@@ -0,0 +1,18 @@
+---
+id: version-0.67.0-community-contribute
+title: Contribute
+sidebar_label: Contribute
+original_id: community-contribute
+---
+
+## Contribute
+
+We welcome contributions to StreamPipes. If you are interested in contributing, let us know! You'll
+get to know an open-minded and motivated team working together to build the next IIoT analytics toolbox.
+
+Here are some first steps in case you want to contribute:
+* Subscribe to our dev mailing list [dev-subscribe@streampipes.apache.org](mailto:dev-subscribe@streampipes.apache.org)
+* Send an email, tell us about your interests and which parts of StreamPipes you'd like to contribute to (e.g., core or UI)!
+* Ask for a mentor who helps you understand the code base and guides you through the first setup steps
+* Find an issue in our [Jira](https://issues.apache.org/jira/projects/STREAMPIPES) which is tagged with a _newbie_ tag
+* Have a look at our **developer wiki** at [https://cwiki.apache.org/confluence/display/STREAMPIPES](https://cwiki.apache.org/confluence/display/STREAMPIPES) to learn more about StreamPipes development.
diff --git a/documentation/website/versioned_docs/version-0.67.0/09_get-help.md b/documentation/website/versioned_docs/version-0.67.0/09_get-help.md
new file mode 100644
index 0000000..fc95183
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/09_get-help.md
@@ -0,0 +1,27 @@
+---
+id: version-0.67.0-community-get-help
+title: Get Help
+sidebar_label: Get Help
+original_id: community-get-help
+---
+
+The Apache StreamPipes community is happy to help with any questions or problems you might have.
+
+## Questions
+Subscribe to our user mailing list to ask a question.
+
+[Mailing Lists](https://streampipes.apache.org/mailinglists.html)
+
+To subscribe to the user list, send an email to [users-subscribe@streampipes.apache.org](mailto:users-subscribe@streampipes.apache.org).
+
+We also maintain a Slack channel, which might be a better fit for problems that require more interaction:
+
+[Slack Channel](http://slack.streampipes.org)
+
+## Bugs and Feature Requests
+
+If you've found a bug or have a feature that you'd love to see in StreamPipes, feel free to create an issue in our Jira:
+[https://issues.apache.org/jira/projects/STREAMPIPES](https://issues.apache.org/jira/projects/STREAMPIPES)
+
+
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/dev-guide-archetype.md b/documentation/website/versioned_docs/version-0.67.0/dev-guide-archetype.md
new file mode 100644
index 0000000..a89fd68
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/dev-guide-archetype.md
@@ -0,0 +1,7 @@
+---
+id: version-0.67.0-dev-guide-archetype
+title: Start Developing
+sidebar_label: Start Developing
+original_id: dev-guide-archetype
+---
+
diff --git a/documentation/website/versioned_docs/version-0.67.0/user-guide-installation.md b/documentation/website/versioned_docs/version-0.67.0/user-guide-installation.md
new file mode 100644
index 0000000..76d90d4
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/user-guide-installation.md
@@ -0,0 +1,121 @@
+---
+id: version-0.67.0-user-guide-installation
+title: Installation
+sidebar_label: Installation
+original_id: user-guide-installation
+---
+## Prerequisites
+
+### Software
+
+* Docker (latest version, see instructions below)
+* Docker Compose (latest version, see instructions below)
+
+### Supported operating systems
+We rely on Docker and support three operating systems for the StreamPipes system:
+
+* Linux
+* OSX
+* Windows 10
+    * Please note that older Windows versions are not compatible with Docker. Also, Linux VMs running under Windows might not work due to networking problems with Docker.
+
+### Web Browser
+StreamPipes is a modern web application, therefore you need a recent version of Chrome (recommended), Firefox or Edge.
+
+### Docker
+You need to have Docker installed on your system before you continue with the installation guide.
+
+
+<div class="admonition info">
+<div class="admonition-title">Install Docker</div>
+<p>Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure Docker can be started as a non-root user (described in the installation manual; don't forget to log out and in again) and check that Docker is installed correctly by executing <code>docker run hello-world</code>.</p>
+</div>
+
+<div class="admonition info">
+<div class="admonition-title">Configure Docker</div>
+<p>By default, Docker uses only a limited number of CPU cores and memory.
+       If you run StreamPipes on Windows or on a Mac you need to adjust the default settings.
+       To do that, click on the Docker icon in your menu bar (or system tray on Windows) and open the preferences.
+       Go to the advanced preferences and set the **number of CPUs to 6** (recommended) and the **Memory to 4GB**.
+       After changing the settings, Docker needs to be restarted.</p></div>
+
+
+## Install StreamPipes
+
+<div class="tab-content" id="myTabContent">
+    <div class="tab-pane fade show active" id="linux" role="tabpanel" aria-labelledby="linux-tab">
+        <ul style="padding-left:0">
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">1</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                        <a href="https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=incubator/streampipes/installer/0.67.0/apache-streampipes-installer-0.67.0-incubating-source-release.zip">Download</a>
+                        the latest Apache StreamPipes release and extract the zip file to a directory of your choice.
+                    </div>
+                </div>
+            </li>
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">2</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                       In a command prompt, open the folder <code>/compose</code> and run <code>docker-compose up -d</code>.
+                    </div>
+                </div>
+            </li>
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">3</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                        Open your browser, navigate to http://localhost:80 (or the domain name of your server) and finish the setup according to the instructions below.
+                    </div>
+                </div>
+            </li>
+        </ul>
+        </div>
+    </div>
+
+## Setup StreamPipes
+
+Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below.
+To set up the system, enter an email address and a password and click on install.
+At this point, it is not necessary to change anything in the advanced settings menu.
+The installation might take some time; once all components are successfully configured, continue by clicking on "Go to login page".
+
+
+On the login page, enter your credentials, then you should be forwarded to the home page.
+
+Congratulations! You've successfully managed to install StreamPipes. Now we're ready to build our first pipeline!
+
+<div class="my-carousel">
+    <img src="/docs/img/quickstart/setup/01_register_user.png" alt="Set Up User">
+    <img src="/docs/img/quickstart/setup/02_user_set_up.png" alt="SetUp StreamPipes Components">
+    <img src="/docs/img/quickstart/setup/03_login.png" alt="Go to login page">
+    <img src="/docs/img/quickstart/setup/04_home.png" alt="Home page">
+</div>
+
+<div class="admonition error">
+<div class="admonition-title">Errors during the installation process</div>
+<p>In most cases, errors during the installation are due to an under-powered system.<br/>
+If there is a problem with any of the components, please restart the whole system and delete the "config" directory on the server.
+   This directory is in the same folder as the docker-compose.yml file.<br/>
+   Please also make sure that your system meets the hardware requirements as mentioned in the first section of the installation guide.</p>
+</div>
+
+## Next Steps
+
+Now you can continue with the tutorial on page [First steps](user-guide-first-steps.md).
diff --git a/documentation/website/versioned_docs/version-0.67.0/user-guide-introduction.md b/documentation/website/versioned_docs/version-0.67.0/user-guide-introduction.md
new file mode 100644
index 0000000..b746ac8
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.67.0/user-guide-introduction.md
@@ -0,0 +1,62 @@
+---
+id: version-0.67.0-user-guide-introduction-old
+title: Introduction
+sidebar_label: Introduction
+original_id: user-guide-introduction-old
+---
+
+StreamPipes is a framework that enables users to work with data streams.
+It uses many different technologies, especially from the fields of big data, distributed computing and the semantic web.
+One of the core concepts of StreamPipes is to add a higher semantic layer on top of big data processing technologies to ease their usage.
+StreamPipes is not just a UI; it is a framework with many different capabilities, such as modelling new data processing pipelines and executing them in a distributed environment.
+On top of that, it uses semantics to guide non-technical users in analyzing their data streams in a self-service manner.
+
+
+
+## Pipelines
+The core concept of StreamPipes is the data processing pipeline.
+Pipelines use data from different sources (Data Streams), transform it via Processing Elements, and store it in a database or send it to third-party systems (Data Sinks).
+A brief introduction is given in the following sections.
+On the next page, a detailed tour through StreamPipes explains all the different features that are available.
+
+
+## Data Streams
+Data Streams represent the primary source for data in StreamPipes.
+A stream is an ordered sequence of events, where an event is described as one or more observation values.
+Those events can come from different sources such as sensors, machines, log files and many more.
+It does not matter what kind of serialization format the events have or which kind of transportation protocol the individual data streams use.
+As long as a semantic description is provided, StreamPipes is capable of processing the data.
+
+
+## Processing Elements
+A Processing Element is a processor that transforms one or more input event streams into an output event stream.
+Those transformations can be rather simple, like filtering out events based on a predefined rule, or more complex, like applying algorithms to the data.
+Processing Elements define stream requirements, which are a set of minimum properties an incoming event stream must provide.
+Furthermore, Processing Elements describe their output based on a set of output strategies.
+They also describe further (human) input in the form of configuration parameters.
+Processing Elements can be implemented using multiple technologies.
+This information is not necessary when constructing a pipeline; the user does not need to know where and how the actual algorithm is deployed and executed.
+During the modelling phase it is possible to set configuration parameters, which are then injected into the program when it is started.
+A description is provided for all parameters, and the system ensures that the user can only enter semantically correct values.
+
+
+## Data Sinks
+Data Sinks consume event streams similarly to processing elements, with the difference that sinks do not provide an output stream; instead, they perform some action or trigger a visualization as a result of a stream transformation.
+Data Sinks also define stream requirements that must be fulfilled.
+In a pipeline, it is not necessary to use a processing element to transform data.
+Often it can make sense to just use a data sink and connect it directly to the sensor in order to store the raw data in a data store for offline analysis.
+This is very simple with StreamPipes, and no additional code has to be written to create such a data lake.
+
+
+## Target Audience
+StreamPipes focuses on multiple target groups.
+This guide is for users who interact with the graphical user interface in the browser.
+If you are interested in the technical details or plan to extend the system with new algorithms, please read the Developer Guide.
+The graphical user interface is designed for domain experts who want to analyze data, but are not interested in technical details and do not want to write code.
+The SDK can be used by software developers to extend the framework with new functionality.
+After importing newly developed pipeline elements, they are available to all users of StreamPipes.
+
+
+## Next Steps
+To test StreamPipes on your local environment go to the [installation guide](user-guide-installation.md).
+If you are further interested in the concepts of StreamPipes continue with the [tour](user-guide-tour.md).
diff --git a/documentation/website/versioned_sidebars/version-0.67.0-sidebars.json b/documentation/website/versioned_sidebars/version-0.67.0-sidebars.json
new file mode 100644
index 0000000..b414d84
--- /dev/null
+++ b/documentation/website/versioned_sidebars/version-0.67.0-sidebars.json
@@ -0,0 +1,221 @@
+{
+  "version-0.67.0-documentation": {
+    "πŸš€ Try StreamPipes": [
+      "version-0.67.0-user-guide-introduction",
+      "version-0.67.0-try-installation",
+      "version-0.67.0-try-tutorial"
+    ],
+    "πŸ’‘ Concepts": [
+      "version-0.67.0-concepts-overview"
+    ],
+    "πŸŽ“ Use StreamPipes": [
+      "version-0.67.0-use-connect",
+      "version-0.67.0-use-pipeline-editor",
+      "version-0.67.0-use-managing-pipelines",
+      "version-0.67.0-use-dashboard",
+      "version-0.67.0-use-data-explorer",
+      "version-0.67.0-use-notifications"
+    ],
+    "πŸ“š Pipeline Elements": [
+      {
+        "type": "subcategory",
+        "label": "Adapters",
+        "ids": [
+          "version-0.67.0-pe/org.apache.streampipes.connect.protocol.stream.kafka",
+          "version-0.67.0-pe/org.apache.streampipes.connect.protocol.stream.pulsar",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.coindesk",
+          "version-0.67.0-pe/org.apache.streampipes.protocol.set.file",
+          "version-0.67.0-pe/org.apache.streampipes.connect.protocol.stream.file",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.gdelt",
+          "version-0.67.0-pe/org.apache.streampipes.protocol.set.http",
+          "version-0.67.0-pe/org.apache.streampipes.connect.protocol.stream.http",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.iex.news",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.iex.stocks",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.iss",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.image.set",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.image.stream",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.influxdb.set",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.influxdb.stream",
+          "version-0.67.0-pe/org.apache.streampipes.connect.protocol.stream.mqtt",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.mysql.set",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.mysql.stream",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.netio.mqtt",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.netio.rest",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.opcua",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.plc4x.s7",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.ros",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.simulator.randomdataset",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.slack",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.ti",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.wikipedia.edit",
+          "version-0.67.0-pe/org.apache.streampipes.connect.adapters.wikipedia.new"
+        ]
+      },
+      {
+        "type": "subcategory",
+        "label": "Data Processors",
+        "ids": [
+          "version-0.67.0-pe/org.apache.streampipes.processors.pattern-detection.flink.absence",
+          "version-0.67.0-pe/org.apache.streampipes.processors.aggregation.flink.aggregation",
+          "version-0.67.0-pe/org.apache.streampipes.processors.pattern-detection.flink.and",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.duration-value",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.chunker",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.compose",
+          "version-0.67.0-pe/org.apache.streampipes.processors.aggregation.flink.count",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.count-array",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG",
+          "version-0.67.0-pe/org.apache.streampipes.processors.aggregation.flink.eventcount",
+          "version-0.67.0-pe/org.apache.streampipes.processors.aggregation.flink.rate",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.field-converter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.fieldhasher",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.field-mapper",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.field-renamer",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.frequency",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.frequencychange",
+          "version-0.67.0-pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification",
+          "version-0.67.0-pe/org.apache.streampipes.processor.geo.jvm.geocoding",
+          "version-0.67.0-pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper",
+          "version-0.67.0-pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.flink.languagedetection",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.languagedetection",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.enrich",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.merge",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.namefinder",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.numericalfilter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.numericalfilter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.partofspeech",
+          "version-0.67.0-pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.project",
+          "version-0.67.0-pe/org.apache.streampipes.processor.imageclassification.qrcode",
+          "version-0.67.0-pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection",
+          "version-0.67.0-pe/org.apache.streampipes.processors.pattern-detection.flink.sequence",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.sequence",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure",
+          "version-0.67.0-pe/org.apache.streampipes.processor.geo.flink",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.processor.speed",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.split-array",
+          "version-0.67.0-pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator",
+          "version-0.67.0-pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop",
+          "version-0.67.0-pe/org.apache.streampipes.processors.statistics.flink.statistics-summary",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.stop",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.taskduration",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.textfilter",
+          "version-0.67.0-pe/org.apache.streampipes.processors.filters.jvm.threshold",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.flink.timestamp",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.jvm.tokenizer",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean",
+          "version-0.67.0-pe/org.apache.streampipes.processors.siddhi.increase",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry",
+          "version-0.67.0-pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing",
+          "version-0.67.0-pe/org.apache.streampipes.processors.transformation.jvm.changed-value",
+          "version-0.67.0-pe/org.apache.streampipes.processors.textmining.flink.wordcount"
+        ]
+      },
+      {
+        "type": "subcategory",
+        "label": "Data Sinks",
+        "ids": [
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.jvm.couchdb",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.internal.jvm.dashboard",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.internal.jvm.datalake",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.ditto",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.flink.elasticsearch",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.notifications.jvm.email",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.jvm.influxdb",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.jvm.iotdb",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.jms",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.kafka",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.mqtt",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.internal.jvm.notification",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.jvm.opcua",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.notifications.jvm.onesignal",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.databases.jvm.postgresql",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.pulsar",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.rest",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq",
+          "version-0.67.0-pe/org.apache.streampipes.sinks.notifications.jvm.slack"
+        ]
+      }
+    ],
+    "⚑ Deploy StreamPipes": [
+      "version-0.67.0-deploy-docker",
+      "version-0.67.0-deploy-kubernetes",
+      "version-0.67.0-deploy-use-ssl"
+    ],
+    "πŸ’» Extend StreamPipes": [
+      "version-0.67.0-extend-setup",
+      "version-0.67.0-extend-cli",
+      "version-0.67.0-extend-archetypes",
+      "version-0.67.0-extend-tutorial-data-sources",
+      "version-0.67.0-extend-tutorial-data-processors",
+      "version-0.67.0-extend-tutorial-data-sinks",
+      "version-0.67.0-extend-sdk-event-model",
+      "version-0.67.0-extend-sdk-stream-requirements",
+      "version-0.67.0-extend-sdk-static-properties",
+      "version-0.67.0-extend-sdk-output-strategies"
+    ],
+    "πŸ”§ Technicals": [
+      "version-0.67.0-technicals-architecture",
+      "version-0.67.0-technicals-user-guidance",
+      "version-0.67.0-technicals-runtime-wrappers",
+      "version-0.67.0-technicals-messaging",
+      "version-0.67.0-technical-configuration"
+    ],
+    "πŸ‘ͺ Community": [
+      "version-0.67.0-community-get-help",
+      "version-0.67.0-community-contribute"
+    ]
+  },
+  "version-0.67.0-developer-guide": {
+    "Basics": [
+      "version-0.67.0-dev-guide-introduction",
+      "version-0.67.0-dev-guide-architecture",
+      "version-0.67.0-dev-guide-development-environment",
+      "version-0.67.0-dev-guide-archetype",
+      "version-0.67.0-dev-guide-migration"
+    ],
+    "Tutorials": [
+      "version-0.67.0-dev-guide-tutorial-sources",
+      "version-0.67.0-dev-guide-tutorial-processors",
+      "version-0.67.0-dev-guide-tutorial-sinks"
+    ],
+    "SDK Reference": [
+      "version-0.67.0-dev-guide-sdk-guide-sources",
+      "version-0.67.0-dev-guide-sdk-guide-processors",
+      "version-0.67.0-dev-guide-sdk-guide-sinks",
+      "version-0.67.0-dev-guide-stream-requirements",
+      "version-0.67.0-dev-guide-static-properties",
+      "version-0.67.0-dev-guide-output-strategies",
+      "version-0.67.0-dev-guide-event-model"
+    ],
+    "Configuration": [
+      "version-0.67.0-dev-guide-ssl",
+      "version-0.67.0-dev-guide-configuration"
+    ]
+  },
+  "version-0.67.0-faq": {
+    "FAQ": [
+      "version-0.67.0-faq-common-problems"
+    ]
+  }
+}
diff --git a/documentation/website/versions.json b/documentation/website/versions.json
index e132de1..5165f27 100644
--- a/documentation/website/versions.json
+++ b/documentation/website/versions.json
@@ -1,4 +1,5 @@
 [
+  "0.67.0",
   "0.66.0",
   "0.65.0-pre-asf",
   "0.64.0-pre-asf",