Posted to commits@streampipes.apache.org by ri...@apache.org on 2022/03/21 10:21:15 UTC

[incubator-streampipes-website] branch dev updated: Release new docs version 0.69.0

This is an automated email from the ASF dual-hosted git repository.

riemer pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampipes-website.git


The following commit(s) were added to refs/heads/dev by this push:
     new 24bb561  Release new docs version 0.69.0
24bb561 is described below

commit 24bb561cf174129eee6d64ec82e7e19792c4d52d
Author: Dominik Riemer <do...@gmail.com>
AuthorDate: Mon Mar 21 11:21:06 2022 +0100

    Release new docs version 0.69.0
---
 documentation/website/i18n/en.json                 |  52 +++
 .../version-0.69.0/01_try-installation.md          | 138 ++++++++
 .../version-0.69.0/03_use-configurations.md        |  49 +++
 .../version-0.69.0/05_deploy-security.md           |  76 +++++
 .../version-0.69.0/06_extend-archetypes.md         |  65 ++++
 .../versioned_docs/version-0.69.0/06_extend-cli.md | 191 +++++++++++
 .../version-0.69.0/06_extend-first-processor.md    |  58 ++++
 .../version-0.69.0/06_extend-sdk-event-model.md    | 142 ++++++++
 .../version-0.69.0/06_extend-sdk-migration-sd.md   | 117 +++++++
 .../06_extend-sdk-stream-requirements.md           | 179 ++++++++++
 .../version-0.69.0/06_extend-setup.md              |  32 ++
 .../06_extend-tutorial-data-processors.md          | 359 +++++++++++++++++++++
 .../06_extend-tutorial-data-sinks.md               | 231 +++++++++++++
 .../06_extend-tutorial-data-sources.md             | 212 ++++++++++++
 .../version-0.69.0-sidebars.json                   | 240 ++++++++++++++
 documentation/website/versions.json                |   1 +
 16 files changed, 2142 insertions(+)

diff --git a/documentation/website/i18n/en.json b/documentation/website/i18n/en.json
index 818a70a..a2e36bc 100644
--- a/documentation/website/i18n/en.json
+++ b/documentation/website/i18n/en.json
@@ -2456,6 +2456,58 @@
       "version-0.68.0/pe/version-0.68.0-org.apache.streampipes.sinks.notifications.jvm.telegram": {
         "title": "Telegram Publisher",
         "sidebar_label": "Telegram Publisher"
+      },
+      "version-0.69.0/version-0.69.0-try-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.69.0/version-0.69.0-use-configurations": {
+        "title": "Configurations",
+        "sidebar_label": "Configurations"
+      },
+      "version-0.69.0/version-0.69.0-deploy-security": {
+        "title": "Security",
+        "sidebar_label": "Security"
+      },
+      "version-0.69.0/version-0.69.0-extend-archetypes": {
+        "title": "Maven Archetypes",
+        "sidebar_label": "Maven Archetypes"
+      },
+      "version-0.69.0/version-0.69.0-extend-cli": {
+        "title": "StreamPipes CLI",
+        "sidebar_label": "StreamPipes CLI"
+      },
+      "version-0.69.0/version-0.69.0-extend-first-processor": {
+        "title": "Your first data processor",
+        "sidebar_label": "Your first data processor"
+      },
+      "version-0.69.0/version-0.69.0-extend-sdk-event-model": {
+        "title": "SDK Guide: Event Model",
+        "sidebar_label": "SDK: Event Model"
+      },
+      "version-0.69.0/version-0.69.0-extend-sdk-migration-service-discovery": {
+        "title": "Migration Guide: New Service Discovery in 0.69.0",
+        "sidebar_label": "Migration Guide: 0.69.0"
+      },
+      "version-0.69.0/version-0.69.0-extend-sdk-stream-requirements": {
+        "title": "SDK Guide: Stream Requirements",
+        "sidebar_label": "SDK: Stream Requirements"
+      },
+      "version-0.69.0/version-0.69.0-extend-setup": {
+        "title": "Development Setup",
+        "sidebar_label": "Development Setup"
+      },
+      "version-0.69.0/version-0.69.0-extend-tutorial-data-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.69.0/version-0.69.0-extend-tutorial-data-sinks": {
+        "title": "Tutorial: Data Sinks",
+        "sidebar_label": "Tutorial: Data Sinks"
+      },
+      "version-0.69.0/version-0.69.0-extend-tutorial-data-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
       }
     },
     "links": {
diff --git a/documentation/website/versioned_docs/version-0.69.0/01_try-installation.md b/documentation/website/versioned_docs/version-0.69.0/01_try-installation.md
new file mode 100644
index 0000000..48dbf39
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/01_try-installation.md
@@ -0,0 +1,138 @@
+---
+id: version-0.69.0-try-installation
+title: Installation
+sidebar_label: Installation
+original_id: try-installation
+---
+
+The easiest way to install StreamPipes is our Docker-based installation. For production-grade deployments, we also
+recommend looking at our Kubernetes support, which is also part of the installation kit.
+
+## Prerequisites
+
+The Docker-based installation requires **Docker** and **Docker Compose** to be installed on the target machine.
+Installation instructions can be found below.
+
+<div class="admonition info">
+<div class="admonition-title">Install Docker</div>
+<p>Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure Docker can be started as a non-root user (described in the installation manual, don’t forget to log out and in again) and check that Docker is installed correctly by executing <code>docker run hello-world</code>.</p>
+</div>
+
+<div class="admonition info">
+<div class="admonition-title">Configure Docker</div>
+<p>By default, Docker uses only a limited number of CPU cores and memory.
+       If you run StreamPipes on Windows or on a Mac you need to adjust the default settings.
+       To do that, click on the Docker icon in your tab bar and open the preferences.
+       Go to the advanced preferences and set the **number of CPUs to 6** (recommended) and the **Memory to 4GB**.
+       After changing the settings, Docker needs to be restarted.</p></div>
+
+### Supported operating systems
+
+The Docker-based installation supports the operating systems **Linux**, **Mac OS X** and **Windows 10**. Older Windows
+versions are not fully compatible with Docker. Linux VMs running under Windows might cause network problems with Docker;
+therefore, some manual work might be needed to make StreamPipes run properly.
+
+### Web Browser
+
+The StreamPipes application itself will be accessible through a web browser. We recommend a recent version of Chrome (
+best experience), Firefox or Edge.
+
+## Install StreamPipes
+
+<ul style="padding-left:0">
+  <li class="installation-step" style="margin-top:20px;">
+      <div class="wrapper-container" style="">
+          <div class="wrapper-step">
+              <span class="fa-stack fa-2x">
+                   <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                   <strong class="fa-stack-1x" style="color:white;">1</strong>
+              </span>
+          </div>
+          <div class="wrapper-instruction">
+              Download the latest Apache StreamPipes release and extract the zip file to a directory of your choice.
+                  <table class="table" style="margin-top:30px;">
+                      <thead>
+                      <tr style="background:rgb(27, 20, 100);color:white;">
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">File</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Version</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Release Date</th>
+                          <th scope="col" style="border-bottom:0px;border-top:0px;">Signatures</th>
+                      </tr>
+                      </thead>
+                      <tbody>
+                      <tr>
+                          <td><a href="https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=incubator/streampipes/0.69.0/apache-streampipes-0.69.0-incubating-source-release.zip">apache-streampipes-0.69.0-incubating-source-release.zip</a></td>
+                          <td>0.69.0</td>
+                          <td>2022-03-21</td>
+                          <td>
+                              <a href="https://downloads.apache.org/incubator/streampipes/0.69.0/apache-streampipes-0.69.0-incubating-source-release.zip.sha512">SHA</a>
+                              <a href="https://downloads.apache.org/incubator/streampipes/0.69.0/apache-streampipes-0.69.0-incubating-source-release.zip.asc">PGP</a>
+                          </td>
+                      </tr>
+                      </tbody>
+                  </table>
+              <div class="row">
+                  <div class="alert alert-info" role="alert">
+                    The above release file should be verified using the PGP signatures and the <a href="https://downloads.apache.org/incubator/streampipes/KEYS">project release KEYS</a>. See the official ASF <a target="asf" href="https://www.apache.org/dyn/closer.cgi#verify">verification instructions</a> for a description of using the PGP and KEYS files for verification. A SHA512 checksum is also provided as an additional verification method.
+                  </div>
+              </div>
+          </div>
+      </div>
+  </li>
+  <li class="installation-step">
+      <div class="wrapper-container">
+          <div class="wrapper-step">
+              <span class="fa-stack fa-2x">
+                   <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                   <strong class="fa-stack-1x" style="color:white;">2</strong>
+              </span>
+          </div>
+          <div class="wrapper-instruction">
+             <div style="margin-bottom:5px;">In a command prompt, open the folder <code>installer/compose</code> and run <code>docker-compose up -d</code>.
+                <div class="row" style="margin-top:10px;">              
+                    <div class="alert alert-info" role="alert">The folder contains two docker-compose files: the standard file contains the so-called <i>lite</i> installation, which includes fewer pipeline elements but also requires less powerful hardware. See the GitHub <a href="https://github.com/apache/incubator-streampipes-installer/tree/master/compose">README</a> for complete usage instructions.</div>
+            </div>
+            </div>
+        </div>
+    </div>
+  </li>
+    <li class="installation-step">
+        <div class="wrapper-container" style="align-items: center;justify-content: center;">
+            <div class="wrapper-step">
+                <span class="fa-stack fa-2x">
+                     <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                     <strong class="fa-stack-1x" style="color:white;">3</strong>
+                </span>
+            </div>
+            <div class="wrapper-instruction">
+                Open your browser, navigate to http://localhost:80 (or the domain name of your server) and log in using the default email <code>admin@streampipes.apache.org</code> and default password <code>admin</code>.
+            </div>
+        </div>
+    </li>
+</ul>
+
+## Setup StreamPipes
+
+Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below. At initial startup, StreamPipes automatically performs an installation process.
+Once all components are successfully configured and the installation has finished, continue by clicking on "Go to login
+page".
+
+On the login page, enter your credentials, then you should be forwarded to the home page.
+
+Congratulations! You've successfully managed to install StreamPipes. Now we're ready to build our first pipeline!
+
+<div class="my-carousel docs-carousel">
+    <img src="/docs/img/01_try-installation/03_login.png" alt="Go to login page">
+    <img src="/docs/img/01_try-installation/04_home.png" alt="Home page">
+</div>
+
+<div class="admonition error">
+<div class="admonition-title">Errors during the installation process</div>
+<p>In most cases, errors during the installation are due to an under-powered system.<br/>
+If there is a problem with any of the components, please restart the whole system (<code>docker-compose down</code> and, if necessary, also delete the volumes).
+   Please also make sure that your system meets the hardware requirements as mentioned in the first section of the installation guide.</p>
+</div>
+
+## Next Steps
+
+That's it! To ease your first steps with StreamPipes, we've created an [interactive tutorial](01_try-tutorial.md).
diff --git a/documentation/website/versioned_docs/version-0.69.0/03_use-configurations.md b/documentation/website/versioned_docs/version-0.69.0/03_use-configurations.md
new file mode 100644
index 0000000..5978772
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/03_use-configurations.md
@@ -0,0 +1,49 @@
+---
+id: version-0.69.0-use-configurations
+title: Configurations
+sidebar_label: Configurations
+original_id: use-configurations
+---
+
+The configuration section is an admin-only interface for system-wide settings.
+
+## General configuration
+
+<img class="docs-image" src="/docs/img/03_use-configurations/01_general-configuration.png" alt="General configuration">
+
+The general configuration serves to provide basic system settings. The basic settings allow you to configure the app name (which is used, e.g., for mails sent by StreamPipes).
+Additionally, the externally available host and port can be set, which are used by the mail system to add links to emails.
+
+Furthermore, self-registration and password recovery features can be activated in this view. Note that both features require a working email configuration.
+
+## Datalake
+
+<img class="docs-image" src="/docs/img/03_use-configurations/02_datalake-configuration.png" alt="Datalake configuration">
+
+Here, stored data lake databases can be truncated or deleted. The view also gives information on the number of data points currently stored in a measurement series.
+
+## Email configuration
+
+<img class="docs-image" src="/docs/img/03_use-configurations/03_email-configuration.png" alt="Email configuration">
+
+In this section, the email configuration is set. The email configuration is used to send mails to users. Most standard mail server settings are supported. The configuration can be validated by triggering a test mail that is sent to a given recipient.
+
+## Messaging
+
+<img class="docs-image" src="/docs/img/03_use-configurations/04_messaging-configuration.png" alt="Messaging configuration">
+
+Messaging configuration is used to control parameters used for communication between pipeline elements. Individual Kafka settings can be configured, as well as the priority of selected message formats and protocols during pipeline creation.
+
+## Pipeline Element Configuration
+
+<img class="docs-image" src="/docs/img/03_use-configurations/05_pipeline-element-configuration.png" alt="Pipeline element configuration">
+
+Individual configurations of extensions services are available in this view. The available configurations depend on the configuration variables provided in the service definition of each extensions service.
+
+## Security
+
+<img class="docs-image" src="/docs/img/03_use-configurations/06_security-configuration.png" alt="Security configuration">
+
+The security configuration allows you to manage existing user accounts, service accounts and groups. New users can be added and roles can be assigned.
+
+Please also read more about security [here](05_deploy-security.md).
diff --git a/documentation/website/versioned_docs/version-0.69.0/05_deploy-security.md b/documentation/website/versioned_docs/version-0.69.0/05_deploy-security.md
new file mode 100644
index 0000000..7aca147
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/05_deploy-security.md
@@ -0,0 +1,76 @@
+---
+id: version-0.69.0-deploy-security
+title: Security
+sidebar_label: Security
+original_id: deploy-security
+---
+
+## Overriding default settings
+
+At installation time, StreamPipes checks for available environment variables relevant for securing the system. If they are not set, it will use the default values.
+
+The following variables are checked by the core at installation time:
+
+* ``SP_INITIAL_ADMIN_EMAIL``: The email address of the initial administrator.
+* ``SP_INITIAL_ADMIN_PASSWORD``: The password of the initial administrator.
+* ``SP_INITIAL_CLIENT_USER``: The initial client user, used by the extensions modules to make authenticated API requests to the core.
+* ``SP_INITIAL_CLIENT_SECRET``: The default password of the initial client user.
+* ``SP_SETUP_INSTALL_PIPELINE_ELEMENTS``: Indicates whether pipeline elements should be installed.
+* ``SP_ENCRYPTION_PASSCODE``: The encryption passcode, used for securely storing secrets (e.g., database connection strings).
+* ``SP_JWT_SECRET``: The JWT secret, used for signing JWT tokens.
+
+In addition, all extensions services that perform requests to the core will need to have the following environment variables set:
+
+* ``SP_CLIENT_USER``: The client user, used by the extensions modules to make authenticated API requests to the core.
+* ``SP_CLIENT_SECRET``: The password of the client user.
+
+Note that there are default values for all environment variables that are set at installation time - make sure to change these settings when moving to production!
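+
+As an illustration only, these variables could be set before the first start, e.g., in an ``.env`` file read by docker-compose (all values below are placeholders, not the actual defaults):
+
+```bash
+# hypothetical example values - adjust to your environment
+SP_INITIAL_ADMIN_EMAIL=admin@example.org
+SP_INITIAL_ADMIN_PASSWORD=change-me
+SP_INITIAL_CLIENT_USER=sp-client
+SP_INITIAL_CLIENT_SECRET=change-me-too
+SP_ENCRYPTION_PASSCODE=a-long-random-passcode
+SP_JWT_SECRET=another-long-random-secret
+```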
+
+## Configuration
+
+Most security-related settings can be set in the configuration section of StreamPipes. The *General* section allows you to enable self-service registration and password recovery (both are disabled by default and require a valid email configuration).
+In the *Security* section, users, service accounts, roles and groups can be configured.
+
+
+## User types
+
+StreamPipes distinguishes between User Accounts (real users that interact with StreamPipes over the UI or an API) and Service Accounts (user-independent accounts which solely use StreamPipes over the API).
+
+Service accounts are typically used by extensions services that require API access to the core (e.g., to get a list of running pipelines).
+
+## Permissions
+
+StreamPipes v0.69.0 comes with more advanced mechanisms to manage permissions.
+For each major resource (pipeline elements, pipelines, StreamPipes Connect adapters, dashboards, data explorer views), permissions can be assigned individually to users and groups.
+
+To ease permission handling, StreamPipes comes with a number of default roles with pre-assigned privileges:
+
+### Roles
+
+* **Admin**: has full access to all resources.
+* **Service Admin**: has full access to all resources, but no access to the UI.
+* **Pipeline Admin**: has full control of pipelines (create, edit, delete, start, stop, pause, resume, etc.).
+* **Pipeline User**: has limited control of pipelines (read only).
+* **Dashboard Admin**: has full control of dashboards (create, edit, delete, etc.).
+* **Dashboard User**: has limited control of dashboards (read only).
+* **Data Explorer Admin**: has full control of data explorer views (create, edit, delete, etc.).
+* **Data Explorer User**: has limited control of data explorer views (read only).
+* **Connect Admin**: has full control of StreamPipes Connect adapters (create, edit, delete, etc.).
+
+### Groups
+
+Roles can be either assigned to specific users or groups. Any group can contain several members. 
+The permissions of a user are the union of the permissions of all roles assigned to the user and the groups to which the user belongs.
+
+### Changing permissions
+
+Any resource has a resource owner, which is the authority that created the resource. Resources can be either public or private. Public resources are available to all users, while the user role determines what the user can do with the resource.
+E.g., a public pipeline created by a user of role ROLE_ADMIN can be edited by all users with role PIPELINE_ADMIN, while the same pipeline can be read by all users with role PIPELINE_USER.
+
+Currently, permissions can only be changed by admin users.
+In the overview section of each resource (e.g., pipelines and dashboards), a permission dialog is available to users with role ROLE_ADMIN. The dialog allows to assign users and groups to the individual resource.
+
+
+
+
+
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-archetypes.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-archetypes.md
new file mode 100644
index 0000000..9e7375b
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-archetypes.md
@@ -0,0 +1,65 @@
+---
+id: version-0.69.0-extend-archetypes
+title: Maven Archetypes
+sidebar_label: Maven Archetypes
+original_id: extend-archetypes
+---
+
+In this tutorial we explain how you can use the Maven archetypes to develop your own StreamPipes processors and sinks.
+We use IntelliJ in this tutorial, but it works with any IDE of your choice.
+
+## Prerequisites
+You need to have Maven installed; furthermore, you need an up-and-running StreamPipes installation on your development computer.
+
+## Create Project
+To create a new project, we provide multiple Maven archetypes.
+Currently, we provide archetypes for standalone Java-based microservices and archetypes for the experimental Flink wrapper.
+The commands required to create a new pipeline element project can be found below. Make sure that you select a version compatible with your StreamPipes installation.
+Copy the command into your terminal to create a new project.
+The project will be created in the current folder.
+First, the ``groupId`` of the resulting Maven artifact must be set.
+We use ``groupId``: ``org.example`` and ``artifactId``: ``ExampleProcessor``.
+You can keep the default values for the other settings and confirm them by hitting enter.
+
+The current {sp.version} is 0.69.0 (for a pre-release version, use the SNAPSHOT suffix, e.g. 0.69.0-SNAPSHOT).
+
+```bash
+mvn archetype:generate \
+  -DarchetypeGroupId=org.apache.streampipes \
+  -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+  -DarchetypeVersion={sp.version}
+```
+<details class="info">
+    <summary>Other archetypes</summary>
+
+## Processors Flink
+```bash
+mvn archetype:generate \
+  -DarchetypeGroupId=org.apache.streampipes \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-flink \
+  -DarchetypeVersion={sp.version}
+```
+
+## Sinks Flink
+```bash
+mvn archetype:generate \
+  -DarchetypeGroupId=org.apache.streampipes \
+  -DarchetypeArtifactId=streampipes-archetype-pe-sinks-flink \
+  -DarchetypeVersion={sp.version}
+```
+</details>
+
+
+## Project structure
+Open the project in your IDE.
+If everything worked, the structure should look similar to the following image.
+The *main* package defines which processors/sinks you want to activate, and the *pe.example* package contains two skeletons for creating a data processor and a data sink.
+For details, have a look at the other parts of the Developer Guide, where these classes are explained in more depth.
+
+<img src="/docs/img/archetype/project_structure.png" width="30%" alt="Project Structure">
+
+## Next steps
+
+Click [here](06_extend-first-processor.md) to learn how to create your first data processor.
+
+
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-cli.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-cli.md
new file mode 100644
index 0000000..c1b2480
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-cli.md
@@ -0,0 +1,191 @@
+---
+id: version-0.69.0-extend-cli
+title: StreamPipes CLI
+sidebar_label: StreamPipes CLI
+original_id: extend-cli
+---
+
+The StreamPipes command-line interface (CLI) is aimed at developers and provides an easy entry point to set up a suitable dev environment, whether you plan on developing
+
+* new extensions such as **connect adapters, processors, sinks** or,
+* new core features for **backend** and **ui**.
+
+The main difference from the standard Docker/K8s installation is improved communication between services running as containers and services running locally for development.
+
+The CLI can be found in the [main repository](https://github.com/apache/incubator-streampipes/tree/master/installer/cli) or in the ``compose/cli`` folder of the downloaded source code.
+
+## TL;DR
+
+```bash
+streampipes env --list
+[INFO] Available StreamPipes environment templates:
+pipeline-element
+...
+streampipes env --set pipeline-element
+streampipes up -d
+```
+> **NOTE**: use `./streampipes` if you haven't added it to the PATH and sourced it (see section "Run `streampipes` from anywhere?").
+
+## Prerequisites
+The CLI is basically a wrapper around multiple `docker` and `docker-compose` commands plus some additional sugar.
+
+* Docker >= 17.06.0
+* Docker-Compose >= 1.26.0 (Compose file format: 3.4)
+* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge
+* For Windows developers: GitBash only
+
+
+Tested on: **macOS**, **Linux**, **Windows** *)
+
+> **NOTE**: *) If you're using Windows the CLI only works in combination with GitBash - CMD, PowerShell won't work.
+
+
+## CLI commands overview
+
+```
+StreamPipes CLI - Manage your StreamPipes environment with ease
+
+Usage: streampipes COMMAND [OPTIONS]
+
+Options:
+  --help, -h      show help
+  --version, -v   show version
+
+Commands:
+  clean       Remove StreamPipes data volumes, dangling images and network
+  down        Stop and remove StreamPipes containers
+  env         Inspect and select StreamPipes environments
+  info        Get information
+  logs        Get container logs for specific container
+  ps          List all StreamPipes container for running environment
+  pull        Download latest images from Dockerhub
+  restart     Restart StreamPipes environment
+  up          Create and start StreamPipes container environment
+
+Run 'streampipes COMMAND --help' for more info on a command.
+```
+
+## Usage: Along dev life-cycle
+
+**List** available environment templates.
+```bash
+streampipes env --list
+```
+
+**Inspect** services in an available environment to know what kind of services it is composed of.
+```bash
+streampipes env --inspect pipeline-element
+```
+
+**Set** environment, e.g. `pipeline-element`, if you want to write a new pipeline element.
+```bash
+streampipes env --set pipeline-element
+```
+
+**Start** environment ( default: `dev` mode). Here the service definition in the selected environment is used to start the multi-container landscape.
+> **NOTE**: `dev` mode is enabled by default since we rely on open ports to core services such as `consul`, `couchdb`, `kafka` etc. so that they can be reached from the IDE when developing. If you don't want to map ports (except the UI port), then use the `--no-ports` flag.
+
+```bash
+streampipes up -d
+# start in production mode with unmapped ports
+# streampipes up -d --no-ports
+```
+Now you're good to go to write your new pipeline element :tada: :tada: :tada:
+
+> **HINT for extensions**: Use our [Maven archetypes](https://streampipes.apache.org/docs/docs/dev-guide-archetype/) to set up a project skeleton and use your IDE of choice for development. However, we do recommend using IntelliJ.
+
+> **HINT for core**: To work on `backend` or `ui` features you need to set the template to `backend` and clone the core repository [incubator-streampipes](https://github.com/apache/incubator-streampipes) - check the prerequisites there for more information.
+
+**Stop** environment and remove docker container
+```bash
+streampipes down
+# want to also clean docker data volumes when stopping the environment?
+# streampipes down -v
+```
+
+## Additional useful commands
+
+**Start individual services only?** We got you! You chose a template that suits your needs and now you only want to start individual services from it, e.g. only Kafka and Consul.
+
+> **NOTE**: the service names need to be present and match your current `.spenv` environment.
+
+```bash
+streampipes up -d kafka consul
+```
+
+**Get current environment** (if previously set using `streampipes env --set <environment>`).
+```bash
+streampipes env
+```
+
+**Get logs** of specific service and use optional `--follow` flag to stay attached to the logs.
+```bash
+streampipes logs --follow backend
+```
+
+**Update** all services of current environment
+```bash
+streampipes pull
+```
+
+**Restart** all services of current environment or specific services
+```bash
+streampipes restart
+# restart backend & consul
+# streampipes restart backend consul
+```
+
+**Clean** your system and remove created StreamPipes Docker volumes, the StreamPipes Docker network and dangling StreamPipes images from old image layers.
+```bash
+streampipes clean
+# remove volumes, network and dangling images
+# streampipes clean --volumes
+```
+
+## Modify/Create an environment template
+As of now, this step has to be done **manually**. All environments are located in `environments/`.
+
+```bash
+├── adapter               # developing a new connect adapter
+├── backend               # developing core backend features
+├── basic                 # wanna run core, UI, connect etc from the IDE?
+├── full                  # full version containing more pipeline elements
+├── lite                  # few pipeline elements, less memory  
+├── pipeline-element      # developing new pipeline-elements
+└── ui                    # developing UI features
+```
+**Modifying an existing environment template**. To modify an existing template, you can simply add a `<YOUR_NEW_SERVICE>` to the template.
+> **NOTE**: You need to make sure that the service you are adding exists in `deploy/standalone/service/<YOUR_NEW_SERVICE>`. If you're adding a completely new service, take a look at existing ones, create a new service directory and include a `docker-compose.yml` and `docker-compose.dev.yml` file.
+
+```
+[environment:backend]
+activemq
+kafka
+...
+<YOUR_NEW_SERVICE>
+```
+
+**Creating a new** environment template. To create a new environment template, place a new file `environments/<YOUR_NEW_ENVIRONMENT>` in the template directory. Open the file and use the following schema.
+> **IMPORTANT**: Please make sure to have `[environment:<YOUR_NEW_ENVIRONMENT>]` header in the first line of your new template matching the name of the file. Make sure to use small caps letters (lowercase) only.
+
+```
+[environment:<YOUR_NEW_ENVIRONMENT>]
+<SERVICE_1>
+<SERVICE_2>
+...
+```
+
+## Run `streampipes` from anywhere? No problem
+Simply add the path to this CLI directory to your `$PATH` variable (on macOS or Linux), e.g. in your `.bashrc` or `.zshrc`, or to `%PATH%` (on Windows).
+
+For **macOS**, or **Linux**:
+
+```bash
+export PATH="/path/to/incubator-streampipes-installer/cli:$PATH"
+```
+
+For **Windows 10**, e.g. check this [documentation](https://helpdeskgeek.com/windows-10/add-windows-path-environment-variable/).
+
+
+## Upgrade to new version
+To upgrade to a new version, simply edit the version tag `SP_VERSION` in the `.env` file.
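+
+For example, the relevant line in the `.env` file might look like this (the version shown is just illustrative):
+
+```bash
+SP_VERSION=0.69.0
+```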
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-first-processor.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-first-processor.md
new file mode 100644
index 0000000..e814610
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-first-processor.md
@@ -0,0 +1,58 @@
+---
+id: version-0.69.0-extend-first-processor
+title: Your first data processor
+sidebar_label: Your first data processor
+original_id: extend-first-processor
+---
+
+In this section, we will explain how to start a pipeline element service and install it using the StreamPipes UI.
+
+Open the class *ExampleDataProcessor* and edit the ``onEvent`` method to log the incoming event to the console and send it to the next component without changing it.
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector collector) {
+    // Print the incoming event on the console
+    System.out.println(event);
+
+    // Hand the incoming event to the output collector without changing it.
+    collector.collect(event);
+}
+```
+
+## Start Processor
+Starting from StreamPipes 0.69.0, the IP address of an extensions service (processor, adapter or sink) will be auto-discovered upon start.
+The auto-discovery is done by the StreamPipes service discovery mechanism and should work for most setups.
+Once you start an extensions service, you will see the chosen IP printed in the console. Make sure that this IP does not point to localhost (127.0.0.1).
+If you see such an IP or the extensions service complains that it cannot resolve the IP, you can manually set the IP address of the extensions service. You can do so by providing an <code>SP_HOST</code> environment variable.
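+
+As a sketch, the variable could be set before starting the service from a terminal (the IP address below is a placeholder for a resolvable address of your machine):
+
+```bash
+export SP_HOST=192.168.2.100
+# then start the extensions service, e.g., from your IDE or as a packaged jar
+```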
+
+
+To check if the service is up and running, open the browser on *'localhost:8090'* (or the port defined in the service definition). The machine-readable description of the processor should be visible as shown below.
+
+<img src="/docs/img/archetype/endpoint.png" width="90%" alt="Project Structure">
+
+
+<div class="admonition error">
+<div class="admonition-title">Common Problems</div>
+<p>
+If the service description is not shown on 'localhost:8090', you might have to change the port address.
+This needs to be done in the configuration of your service, further explained in the configurations part of the developer guide.
+
+If the service does not show up in the StreamPipes installation menu, click on 'MANAGE ENDPOINTS' and add 'http://<span></span>YOUR_IP_OR_DNS_NAME:8090'.
+Use the IP or DNS name you provided as the SP_HOST variable or the IP (if resolvable) found by the auto-discovery service printed in the console.
+After adding the endpoint, a new processor with the name *Example* should show up.
+</p>
+</div>
+
+Now you can go to StreamPipes.
+Your new processor *'Example'* should now show up in the installation menu ("Install Pipeline Elements" in the left navigation bar).
+In case you opened the StreamPipes installation for the first time, it should have been installed automatically during the setup process.
+Install it (if it is not already installed), then switch to the pipeline view and create a simple pipeline that makes use of your newly created processor.
+
+<img src="/docs/img/archetype/example_pipeline.png" width="80%" alt="Project Structure">
+
+Start this pipeline.
+Now you should see logging messages in your console and, once you've created a visualization, you can also see the resulting events of your component in StreamPipes.
+
+Congratulations, you have just created your first processor!
+From here on you can start experimenting and implement your own algorithms.
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-event-model.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-event-model.md
new file mode 100644
index 0000000..11a762b
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-event-model.md
@@ -0,0 +1,142 @@
+---
+id: version-0.69.0-extend-sdk-event-model
+title: SDK Guide: Event Model
+sidebar_label: SDK: Event Model
+original_id: extend-sdk-event-model
+---
+
+## Introduction
+
+This guide explains the usage of the event model to manipulate runtime events for data processors and data sinks.
+
+## Prerequisites
+
+This guide assumes that you are already familiar with the basic setup of [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+### Property Selectors
+
+In most cases, fields that are subject to be transformed by pipeline elements are provided by the assigned ``MappingProperty`` (see the guide on [static properties](dev-guide-static-properties.md)).
+
+Mapping properties return a ``PropertySelector`` that identifies a field based on (i) the **streamIndex** and (ii) the runtime name of the field.
+Let's assume we have an event with the following structure:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "sensor1",
+    "running" : true,
+    "location" : {"latitude" : 34.4, "longitude" : -47},
+    "lastValues" : [45, 22, 21]
+}
+```
+
+In addition, we assume that a data processor exists (with one input node) that converts the temperature value (measured in degrees Celsius) to degrees Fahrenheit.
+In this case, a mapping property (selected by the pipeline developer in the StreamPipes UI) would link to the ``temperature`` field of the event.
+
+The mapping property value will be the ``PropertySelector`` of the temperature value, which looks as follows:
+
+```
+s0::temperature
+```
+
+``s0`` identifies the stream (in this case, only one input stream exists, but as data processors might require more than one input stream, a stream identifier is required), while the suffix identifies the runtime name.
+
+Note: If you add a new field to an input event, you don't need to provide the selector, you can just assign the runtime name as defined by the [output strategy](dev-guide-output-strategies.md).
+
+### Reading Fields
+
+You can get a field from an event by providing the corresponding selector:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  PrimitiveField temperatureField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsPrimitive();
+  }
+
+```
+
+Similarly, if your mapping property links to a nested property, use
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  NestedField nestedField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsNested();
+  }
+
+```
+
+and for a list-based field:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  ListField listField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsList();
+  }
+
+```
+
+### Parsing Fields
+
+#### Primitive Fields
+
+A ``PrimitiveField`` contains convenience methods to directly cast a field to the target datatype:
+
+```java
+
+// parse the value as a float datatype
+Float temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
+
+// or do the same with a double datatype
+Double temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+
+// extracting a string
+String deviceId = event.getFieldBySelector(deviceIdSelector).getAsPrimitive().getAsString();
+
+// this also works for extracting fields from nested fields:
+Double latitude = event.getFieldBySelector(latitudeSelector).getAsPrimitive().getAsDouble();
+
+// extracting boolean values
+Boolean running = event.getFieldBySelector(runningSelector).getAsPrimitive().getAsBoolean();
+```
+
+In rare cases, you might want to receive a field directly based on the runtime name as follows:
+
+```java
+Double temperature = event.getFieldByRuntimeName("temperature").getAsPrimitive().getAsDouble();
+```
+
+#### List Fields
+
+Lists can also be retrieved by providing the corresponding selector and can automatically be parsed to a list of primitive datatypes:
+
+```java
+
+List<Integer> lastValues = event.getFieldBySelector(lastValueSelector).getAsList().parseAsSimpleType(Integer.class);
+
+```
+
+(coming soon: parsing complex lists)
+
+
+### Adding/Updating Fields
+
+Primitive fields can easily be added to an event by providing the runtime name and the object:
+
+```java
+
+    // add a primitive field with runtime name "city" and value "Karlsruhe"
+    event.addField("city", "Karlsruhe");
+
+    // remove the field "temperature" from the event
+    event.removeFieldBySelector(temperatureSelector);
+
+    // add a new field
+    event.addField("fahrenheit", 48);
+```
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-migration-sd.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-migration-sd.md
new file mode 100644
index 0000000..1c3267d
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-migration-sd.md
@@ -0,0 +1,117 @@
+---
+id: version-0.69.0-extend-sdk-migration-service-discovery
+title: Migration Guide: New Service Discovery in 0.69.0
+sidebar_label: Migration Guide: 0.69.0
+original_id: extend-sdk-migration-service-discovery
+---
+
+
+## Introduction
+As part of our roadmap towards a release 1.0, Apache StreamPipes 0.69.0 features a new service discovery approach along with performance improvements related to a new storage layer for pipeline element descriptions.
+
+The new service discovery approach is better suited for cloud-native scenarios, as the hostname of a pipeline element is now decoupled from its description. As such, StreamPipes now supports recovery of pipeline elements independent from their assigned host.
+In addition, the new approach simplifies development of StreamPipes, e.g., in cases where the core is running in Docker and pipeline elements are developed on a local machine. In this case, the IP of the host machine should now be auto-discovered so that providing environment variables should now be unnecessary in most cases.
+The second large improvement is related to the replacement of RDF4J as the storage engine with a NoSQL database. This leads to much faster load times (you will notice this especially at system startup).
+
+While we are heavily working towards a stable release 1.0, we decided to put our efforts into the remaining features required for 1.0 and do not provide an auto-migration related to some breaking changes.
+Therefore, we recommend reinstalling StreamPipes when updating to 0.69.0. We currently plan to have at most two more releases before releasing the first 1.x version of StreamPipes.
+
+## Installation
+* Before upgrading to 0.69.0, clean any existing installation (e.g., by running ``docker-compose down -v``) and make sure that no volumes of StreamPipes are left.
+* Upgrade to the latest installer version (currently the dev branch of incubator-streampipes-installer)
+* Upon restart, make sure that the setup dialog appears (make sure that the new StreamPipes logo appears) and re-initialize the system.
+
+## SDK changes
+
+0.69.0 comes with a new ``ServiceDefinitionBuilder``, which simplifies the definition of a pipeline element service.
+
+The ServiceDefinitionBuilder requires an ID of your extensions service, an optional title and description and a default port.
+It is best to provide 8090 as the default port, so that this will be the standard port of all StreamPipes extensions services at deployment time in a containerized environment.
+The port can always be overridden by providing an ``SP_PORT`` environment variable.
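+
+For example, in a containerized deployment the override could be passed when starting the container (the image name below is a placeholder):
+
+```bash
+docker run -e SP_PORT=8090 my-org/my-extensions-service:0.69.0
+```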
+
+### Init class
+
+Modify the Init class of your pipeline element service as follows:
+
+```java
+public class ExamplesInit extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) {
+    new ExamplesInit().init();
+  }
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
+            "StreamPipes Code Examples",
+            "",
+            8090)
+            .registerMessagingProtocols(new SpKafkaProtocolFactory(), new SpJmsProtocolFactory())
+            .registerMessagingFormats(new JsonDataFormatFactory())
+            .registerPipelineElement(new MyPipelineElementController())
+            .registerAdapter(new MyAdapter())
+            .build();
+  }
+}
+```
+
+You can now easily define a StreamPipes extensions service that supports both custom adapters and pipeline elements by using the following Maven dependency.
+This is optional, and no changes to your existing Maven dependencies (except the version, e.g., 0.69.0-SNAPSHOT) are required.
+
+```xml
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-container-extensions</artifactId>
+</dependency>
+```
+
+
+### Configs
+Prior to version 0.69.0, additional configs had to be provided in a separate ``Config`` class. This is now obsolete - configs can be directly provided within the builder class as follows:
+
+```java
+
+ @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
+            "StreamPipes Code Examples",
+            "",
+            8090)
+            .registerPipelineElement(new MyPipelineElement())
+            .registerAdapter(new MyAdapter())
+            .addConfig("key", 1)
+            .addConfig("my-string-config", "myvalue")
+            .build();
+  }
+```
+
+Configs can be easily accessed from the ``EventProcessorRuntimeContext`` (or ``EventSinkRuntimeContext``):
+
+```java
+@Override
+  public void onInvocation(Parameters params, 
+                            SpOutputCollector spOutputCollector, 
+                            EventProcessorRuntimeContext context) {
+  
+    Integer myConfigValue = context.getConfigStore().getConfig().getInteger("key");
+  }
+```
+
+
+### Service Discovery
+An extensions service can be started by executing the Init class. StreamPipes will now automatically select the proper service IP address and register the service in Consul.
+You can inspect the selected IP address in the console:
+
+```
+16:41:58.342 SP [main] INFO  o.a.s.commons.networking.Networking - Using auto-discovered IP: 172.30.80.1
+16:41:58.364 SP [main] INFO  o.a.s.commons.networking.Networking - Using port from provided environment variable SP_PORT: 6025
+16:41:58.367 SP [main] INFO  o.a.s.c.init.DeclarersSingleton - Registering 0 configs in key/value store
+16:41:58.400 SP [main] INFO  o.a.s.s.consul.ConsulProvider - Checking if consul is available...
+16:41:58.419 SP [main] INFO  o.a.s.s.consul.ConsulProvider - Successfully connected to Consul
+```
+
+In some (rare) cases, a non-resolvable IP will be selected. In this case, you can manually override the IP by providing an ``SP_HOST`` environment variable. This falls back to behaviour similar to pre-0.69.0 versions and will use the manually provided IP.
+
+
+
+
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-stream-requirements.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-stream-requirements.md
new file mode 100644
index 0000000..ee9ccf2
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-sdk-stream-requirements.md
@@ -0,0 +1,179 @@
+---
+id: version-0.69.0-extend-sdk-stream-requirements
+title: SDK Guide: Stream Requirements
+sidebar_label: SDK: Stream Requirements
+original_id: extend-sdk-stream-requirements
+---
+
+## Introduction
+
+Data processors and data sinks can define ``StreamRequirements``. Stream requirements allow pipeline elements to express requirements on an incoming event stream that are needed for the element to work properly.
+Once users create pipelines in the StreamPipes Pipeline Editor, these requirements are verified against the connected event stream.
+By using this feature, StreamPipes ensures that only syntactically and semantically valid pipeline elements can be connected.
+
+This guide covers the creation of stream requirements. Before reading this section, we recommend that you make yourself familiar with the SDK guide on [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://www.github.com/apache/incubator-streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/apache/streampipes/pe/examples/jvm/requirements/">Github</a>.</p>
+</div>
+
+## The StreamRequirementsBuilder
+
+Stream requirements can be defined in the ``declareModel`` method of the pipeline element class. Start with a method body like this:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+The ``StreamRequirementsBuilder`` class provides methods to add stream requirements to a pipeline element.
+
+## Requirements on primitive fields
+
+As a very first example, let's assume we would like to create a data processor that filters numerical values that are above a given threshold.
+Consequently, any data stream that is connected to the filter processor needs to provide a numerical value.
+
+The stream requirement would be assigned as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredProperty(EpRequirements.numberReq())
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+Note the line starting with ``requiredProperty``, which requires any stream to provide a datatype of type ``number``.
+
+In many cases, you'll want to let the user select a specific field from a data stream from all available fields that match the specified requirement. For that, you simply use the method ``requiredPropertyWithUnaryMapping`` as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+                    Labels.from("number-mapping", "The value that should be filtered", ""), PropertyScope.NONE)
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+See also the developer guide on [static properties](dev-guide-static-properties.md) to better understand the usage of ``MappingProperties``.
+
+Requirements on primitive fields can be specified for all common datatypes:
+
+```java
+ @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".simple", "Simple requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.numberReq()) // any number
+                    .requiredProperty(EpRequirements.doubleReq()) // any field of type double
+                    .requiredProperty(EpRequirements.booleanReq()) // any field of type boolean
+                    .requiredProperty(EpRequirements.integerReq()) // any field of type integer
+                    .requiredProperty(EpRequirements.stringReq()) // any field of type string
+
+                    .requiredProperty(EpRequirements.anyProperty()) // any field allowed (no restriction)
+                    .requiredProperty(EpRequirements.timestampReq())  // any timestamp field
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+```
+
+### Specifying semantics
+
+For some algorithms, only specifying the datatype is not sufficient. Let's consider a geofencing algorithm that detects the presence of some geospatial coordinate (e.g., from a vehicle) within a given location.
+
+You could specify something like this:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+However, this would allow users to create strange pipelines as users could connect any stream containing a double value to our geofencing algorithm.
+To avoid such situations, you can also specify requirements based on the semantics of a field:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Latitude), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Longitude), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+Note that in this case, we make use of Schema.org's ``Latitude`` concept ([https://schema.org/latitude](https://schema.org/latitude)). StreamPipes already includes popular vocabularies for specifying semantics. You are also free to use your own vocabularies.
+
+
+## Requirements on lists
+
+Similarly to primitive requirements, you can define processors that require data streams with list fields, see the following examples:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".list", "List requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Integer))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Double))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Boolean))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.String))
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+## Requirements on nested properties
+
+(coming soon, see the Javadoc for now)
+
+
+
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-setup.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-setup.md
new file mode 100644
index 0000000..6b7c581
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-setup.md
@@ -0,0 +1,32 @@
+---
+id: version-0.69.0-extend-setup
+title: Development Setup
+sidebar_label: Development Setup
+original_id: extend-setup
+---
+
+Pipeline elements in StreamPipes are provided as standalone microservices. New pipeline elements can be easily developed using the provided Maven archetypes and can be installed in StreamPipes at runtime.
+
+In this section, we describe our recommended minimum setup for locally setting up a development instance of StreamPipes needed to develop, run and test new pipeline elements.
+
+## IDE & required dev tools
+StreamPipes does not have specific requirements on the IDE - so feel free to choose the IDE of your choice.
+The only requirements in terms of development tools are that you have Java 8 and Maven installed.
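+
+You can verify that both tools are available on your path, for example:
+
+```bash
+java -version
+mvn -version
+```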
+
+## StreamPipes CLI: Docker-based local StreamPipes instance
+In order to quickly test developed pipeline elements without needing to install all services required by StreamPipes, we provide a CLI tool that allows you to selectively start StreamPipes components.
+The CLI tool allows you to switch between several templates (based on docker-compose) depending on your role.
+
+The documentation on the usage of the CLI tool is available [here](06_extend-cli.md).
+
+## Starter projects
+
+Now, once you've started the development instance, you are ready to develop your very first pipeline element.
+Instead of starting from scratch, we recommend using our provided Maven archetypes:
+
+### Maven archetypes
+
+Create the Maven archetype as described in the [Maven Archetypes](06_extend-archetypes.md) guide.
+
+### Examples
+
+We provide several examples that explain the usage of some concepts in this [Github repo](https://github.com/apache/incubator-streampipes-examples). 
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-processors.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-processors.md
new file mode 100644
index 0000000..50ede6e
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-processors.md
@@ -0,0 +1,359 @@
+---
+id: version-0.69.0-extend-tutorial-data-processors
+title: Tutorial: Data Processors
+sidebar_label: Tutorial: Data Processors
+original_id: extend-tutorial-data-processors
+---
+
+In this tutorial, we will add a new data processor using the standalone wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data processor and an implementation.
+
+## Objective
+
+We are going to create a new data processor that realizes a simple geofencing algorithm - we detect vehicles that enter a specified radius around a user-defined location.
+This pipeline element will be a generic element that works with any event stream that provides geospatial coordinates in the form of a latitude/longitude pair.
+
+The algorithm outputs every location event once the position has entered the geofence.
+
+
+<div class="admonition note">
+<div class="admonition-title">Note</div>
+<p>The implementation in this tutorial is pretty simple - our processor will fire an event every time the GPS location is inside the geofence.
+       In a real-world application, you would probably want to define a pattern that recognizes the _first_ event a vehicle enters the geofence.<br/>
+       This can be easily done using a CEP library.</p>
+</div>
+
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate \
+-DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+-DarchetypeVersion=0.69.0 -DgroupId=my.groupId \
+-DartifactId=my-example -DclassNamePrefix=MyExample -DpackageName=mypackagename
+```
+
+You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data processor for StreamPipes!
+
+## Adding data processor requirements
+
+First, we will add a new stream requirement.
+Create a new class `GeofencingProcessor` which should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.model.DataProcessorType;
+import org.apache.streampipes.model.graph.DataProcessorDescription;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.helpers.OutputStrategies;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.wrapper.context.EventProcessorRuntimeContext;
+import org.apache.streampipes.wrapper.routing.SpOutputCollector;
+import org.apache.streampipes.wrapper.standalone.ProcessorParams;
+import org.apache.streampipes.wrapper.standalone.StreamPipesDataProcessor;
+
+public class GeofencingProcessor extends StreamPipesDataProcessor {
+
+ private static final String LATITUDE_CENTER = "latitude-center";
+ private static final String LONGITUDE_CENTER = "longitude-center";
+
+ @Override
+ public DataProcessorDescription declareModel() {
+  return ProcessingElementBuilder.create("org.streampipes.tutorial-geofencing")
+          .category(DataProcessorType.ENRICH)
+          .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+          .build();
+ }
+
+ @Override
+ public void onInvocation(ProcessorParams parameters, SpOutputCollector spOutputCollector, EventProcessorRuntimeContext runtimeContext) throws SpRuntimeException {
+
+ }
+
+ @Override
+ public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
+
+ }
+
+ @Override
+ public void onDetach() throws SpRuntimeException {
+
+ }
+}
+
+
+```
+
+In this class, we need to implement three methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `onInvocation`, is triggered once a pipeline is started. Finally, the `onEvent` method is called for every incoming event.
+
+Similar to data sources, the SDK provides a builder class to generate the description for data processors.
+Delete the content within the ``declareModel`` method and add the following lines to the `declareModel` method:
+
+```java
+return ProcessingElementBuilder.create("org.streampipes.tutorial.geofencing", "Geofencing", "A simple geofencing data processor")
+```
+
+This creates a new data processor with the ID, title and description assigned to the element builder.
+Next, we add some _stream requirements_ to the description. As we'd like to develop a generic pipeline element that works with any event that provides a lat/lng pair, we define two stream requirements as stated below:
+
+```java
+.requiredStream(StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+            Labels.from("latitude-field", "Latitude", "The event " +
+            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+            Labels.from("longitude-field", "Longitude", "The event " +
+                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .build())
+```
+
+The first line, `.requiredStream()` defines that we want a data processor with exactly one input stream. Adding more stream requirements would create elements with multiple input connectors in StreamPipes.
+Stream requirements can be assigned by using the `StreamRequirementsBuilder` class.
+In our example, we define two requirements, so-called _domain property requirements_. In contrast to _data type requirements_ where we'd expect an event property with a field of a specific data type (e.g., float), domain property requirements expect a specific domain property, e.g., from a vocabulary such as the WGS84 Geo vocab.
+
+Once a pipeline is deployed, we are interested in the actual field (and its field name) that contains the latitude and longitude values.
+In some cases, there might be more than one field that satisfies a property requirement, and we would like users to select the property the geofencing component should operate on.
+Therefore, our example uses the method `requiredPropertyWithUnaryMapping`, which will map a requirement to a real event property of an input stream and let the user choose the appropriate field in the StreamPipes UI when pipelines are defined.
+
+Finally, the `PropertyScope` indicates that the required property is a measurement value (in contrast to a dimension value). This allows us later to provide improved user guidance in the pipeline editor.
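+
+For comparison: if we only cared about the runtime type of a field and not its semantics, we could use a _data type requirement_ instead. Below is a hedged sketch (not part of the geofencing example; the field label is illustrative):
+
+```java
+.requiredStream(StreamRequirementsBuilder
+    .create()
+    // matches any field with a float runtime type, regardless of its domain property
+    .requiredPropertyWithUnaryMapping(EpRequirements.datatypeReq(Datatypes.Float),
+            Labels.from("some-float-field", "Float field", "Any float field"), PropertyScope.MEASUREMENT_PROPERTY)
+    .build())
+```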
+
+Besides requirements, users should be able to define the center coordinate of the geofence and the size of the fence, defined as a radius around the center in meters.
+The radius can be defined by adding a required integer parameter to the description:
+
+```java
+.requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+```
+
+Similar to mapping properties, such parameters have an internal ID (`radius`), a label and a description.
+In addition, we can assign a _value specification_ to the parameter indicating the value range we support.
+Our example supports a radius value between 0 and 1000 with a granularity of 1.
+In the StreamPipes UI, a required parameter without a value specification is rendered as a plain input field; in case we provide an optional value specification, a slider input is automatically generated.
+
+Such user-defined parameters are called _static properties_. There are many different types of static properties (see
+ the [Processor SDK](06_extend-sdk-static-properties.md) for an overview).
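+
+Other static property types follow the same pattern. For illustration, a single-value selection (rendered as a dropdown in the pipeline editor) could be declared roughly like this - a hedged sketch with a hypothetical `unit` parameter, not part of the geofencing example:
+
+```java
+.requiredSingleValueSelection(Labels.from("unit", "Distance Unit", "The unit used for distance values"),
+        Options.from("Meters", "Kilometers"))
+```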
+
+In this example, we'll further add two very simple input fields to let users provide latitude and longitude of the geofence center.
+
+Add the following line to the `declareModel` method:
+
+```java
+   .requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
+   .requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
+
+```
+
+Now we need to define the output of our Geofencing pipeline element.
+As explained in the first section, the element should fire every time some geo-located entity arrives within the defined geofence.
+Therefore, the processor outputs the same schema as it receives as an input.
+Although we don't know the exact input right now as it depends on the stream users connect in StreamPipes when creating pipelines, we can define an _output strategy_ as follows:
+
+```java
+.outputStrategy(OutputStrategies.keep())
+```
+
+This defines a _KeepOutputStrategy_, i.e., the input event schema is not modified by the processor.
+There are many more output strategies you can define depending on the functionality you desire, e.g., _AppendOutput_ for defining a processor that enriches events or _CustomOutput_ in case you would like users to select the output by themselves.
+
+That's it! We've now defined input requirements, required user input and an output strategy.
+In the next section, you will learn how to extract these parameters once the pipeline element is invoked after a pipeline was created.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our geofencing component, the _onInvocation_ method in our class is called. The class `ProcessorParams` includes convenient access to user-configured parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
+
+Next, we are interested in the fields of the input event stream that contain the latitude and longitude values we would like to compare against the geofence center location:
+
+```java
+this.latitudeFieldName = parameters.extractor().mappingPropertyValue("latitude-field");
+this.longitudeFieldName = parameters.extractor().mappingPropertyValue("longitude-field");
+```
+
+We use the same `internalId` we've used to define the mapping property requirements in the `declareModel` method and store the resulting field names in two class variables, since they are needed later in the `onEvent` method to extract the coordinates from incoming events.
+
+Next, for extracting the geofence center coordinates, add two class variables `centerLatitude` and `centerLongitude` and assign the selected values using the following statements:
+
+```java
+this.centerLatitude = parameters.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
+this.centerLongitude = parameters.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
+```
+
+The radius value can be extracted as follows:
+
+```java
+int radius = parameters.extractor().singleValueParameter("radius", Integer.class);
+```
+
+Great! That's all we need to describe a data processor for usage in StreamPipes. Your processor class should now look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.model.DataProcessorType;
+import org.apache.streampipes.model.graph.DataProcessorDescription;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.helpers.OutputStrategies;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.vocabulary.Geo;
+import org.apache.streampipes.wrapper.context.EventProcessorRuntimeContext;
+import org.apache.streampipes.wrapper.routing.SpOutputCollector;
+import org.apache.streampipes.wrapper.standalone.ProcessorParams;
+import org.apache.streampipes.wrapper.standalone.StreamPipesDataProcessor;
+
+public class GeofencingProcessor extends StreamPipesDataProcessor {
+
+ private static final String LATITUDE_CENTER = "latitude-center";
+ private static final String LONGITUDE_CENTER = "longitude-center";
+
+ private float centerLatitude;
+ private float centerLongitude;
+ private int radius;
+
+ private String latitudeFieldName;
+ private String longitudeFieldName;
+
+ @Override
+ public DataProcessorDescription declareModel() {
+  return ProcessingElementBuilder.create("org.streampipes.tutorial-geofencing")
+          .category(DataProcessorType.ENRICH)
+          .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+          .withLocales(Locales.EN)
+          .requiredStream(StreamRequirementsBuilder
+                  .create()
+                  .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+                          Labels.from("latitude-field", "Latitude", "The event " +
+                                  "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                  .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+                          Labels.from("longitude-field", "Longitude", "The event " +
+                                  "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                  .build())
+          .outputStrategy(OutputStrategies.keep())
+          .requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+          .requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
+          .requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
+          .build();
+ }
+
+ @Override
+ public void onInvocation(ProcessorParams parameters, SpOutputCollector spOutputCollector, EventProcessorRuntimeContext runtimeContext) throws SpRuntimeException {
+  this.centerLatitude = parameters.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
+  this.centerLongitude = parameters.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
+  this.radius = parameters.extractor().singleValueParameter("radius", Integer.class);
+  this.latitudeFieldName = parameters.extractor().mappingPropertyValue("latitude-field");
+  this.longitudeFieldName = parameters.extractor().mappingPropertyValue("longitude-field");
+ }
+
+ @Override
+ public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
+
+ }
+
+ @Override
+ public void onDetach() throws SpRuntimeException {
+
+ }
+}
+
+
+```
+
+## Adding an implementation
+
+All that's left to do now is to add an implementation.
+
+Open the class `GeofencingProcessor` and add the following code to the `onEvent` method, together with the `distFrom` helper method, which realizes the geofencing functionality:
+
+```java
+
+  @Override
+  public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
+      float latitude = event.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
+      float longitude = event.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
+
+      float distance = distFrom(latitude, longitude, centerLatitude, centerLongitude);
+
+      if (distance <= radius) {
+        collector.collect(event);
+      }
+  }
+
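+  // Haversine formula: approximates the great-circle distance (in meters) between two lat/lng pairs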
+  public static float distFrom(float lat1, float lng1, float lat2, float lng2) {
+    double earthRadius = 6371000;
+    double dLat = Math.toRadians(lat2-lat1);
+    double dLng = Math.toRadians(lng2-lng1);
+    double a = Math.sin(dLat/2) * Math.sin(dLat/2) +
+            Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
+                    Math.sin(dLng/2) * Math.sin(dLng/2);
+    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
+    return (float) (earthRadius * c);
+  }
+
+}
+```
+
+We won't go into details here as this isn't StreamPipes-related code, but in general the method extracts the latitude and longitude fields from the input event (provided as an instance of the StreamPipes `Event` model) and calculates the distance between the geofence center and these coordinates.
+If the distance is below the given radius, the event is forwarded to the next operator.
+
+See the [event model](06_extend-sdk-event-model.md) guide to learn how to extract parameters from events.
+
+## Registering the pipeline element
+The final step is to register the data processor in the service definition. Open the `Init` class and add the following line to the `SpServiceDefinitionBuilder`:
+
+```java
+ .registerPipelineElement(new GeofencingProcessor())
+```
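+
+Within the archetype-generated project, this line goes into the `provideServiceDefinition` method of the `Init` class. As a rough sketch (assuming the same `SpServiceDefinitionBuilder` structure used in the data sources tutorial; adjust the service ID, name and registered factories to your generated project):
+
+```java
+@Override
+public SpServiceDefinition provideServiceDefinition() {
+  return SpServiceDefinitionBuilder.create("org.apache.streampipes",
+                  "human-readable service name",
+                  "human-readable service description", 8090)
+          // announce the new geofencing processor to the StreamPipes core
+          .registerPipelineElement(new GeofencingProcessor())
+          .registerMessagingFormats(
+                  new JsonDataFormatFactory(),
+                  new CborDataFormatFactory(),
+                  new SmileDataFormatFactory(),
+                  new FstDataFormatFactory())
+          .registerMessagingProtocols(
+                  new SpKafkaProtocolFactory(),
+                  new SpJmsProtocolFactory(),
+                  new SpMqttProtocolFactory())
+          .build();
+}
+```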
+
+## Starting the service
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Once you start the service, it will register in StreamPipes with its hostname. The hostname will be auto-discovered and should work out-of-the-box.
+In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an SP_HOST environment variable to override the auto-discovery.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element services as defined in the `create` method is port 8090.
+       If you'd like to run multiple services at the same time on your development machine, change the port here. As an alternative, you can also provide an env variable `SP_PORT` which overrides the port settings. This is useful for using different configurations for dev and prod environments.
+</p>
+</div>
+
+Now we are ready to start our service!
+
+Execute the main method in the `Init` class of your project, open a web browser and navigate to http://localhost:8090 (or the port you have assigned).
+
+You should see something as follows:
+
+<img src="/docs/img/tutorial-processors/pe-overview-flink.PNG" alt="Pipeline Element Container Overview">
+
+
+The service automatically registers itself in StreamPipes.
+To install the newly created element, open the StreamPipes UI and follow the instructions provided in the [user guide](03_use-install-pipeline-elements.md).
+
+## Read more
+
+Congratulations! You've just created your first data processor for StreamPipes.
+There are many more things to explore and data processors can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](06_extend-sdk-static-properties.md) to see what's possible!
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sinks.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sinks.md
new file mode 100644
index 0000000..37c09a9
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sinks.md
@@ -0,0 +1,231 @@
+---
+id: version-0.69.0-extend-tutorial-data-sinks
+title: Tutorial: Data Sinks
+sidebar_label: Tutorial: Data Sinks
+original_id: extend-tutorial-data-sinks
+---
+
+In this tutorial, we will add a new data sink using the standalone wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data sink and a corresponding implementation.
+
+## Objective
+
+We are going to create a new data sink that calls an external HTTP endpoint to forward data to an external service.
+
+For each incoming event, an external service is invoked using an HTTP POST request. In this example, we'll call an endpoint provided by [RequestBin](https://requestbin.com/).
+To set up your own endpoint, go to [https://requestbin.com/](https://requestbin.com/) and click "Create a request bin". Copy the URL of the newly created endpoint.
+
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.apache.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-extensions-jvm -DarchetypeVersion=0.69.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=sink-tutorial -DclassNamePrefix=Rest -DpackageName=mypackage
+```
+
+You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data sink for StreamPipes!
+
+## Adding data sink requirements
+
+First, we will add a new stream requirement.
+Create a class `RestSink` which should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.model.DataSinkType;
+import org.apache.streampipes.model.graph.DataSinkDescription;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.DataSinkBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.wrapper.context.EventSinkRuntimeContext;
+import org.apache.streampipes.wrapper.standalone.SinkParams;
+import org.apache.streampipes.wrapper.standalone.StreamPipesDataSink;
+
+public class RestSink extends StreamPipesDataSink {
+
+  @Override
+  public DataSinkDescription declareModel() {
+    return DataSinkBuilder.create("org.streampipes.tutorial.pe.sink.rest")
+        .category(DataSinkType.NOTIFICATION)
+        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+        .withLocales(Locales.EN)
+        .requiredStream(StreamRequirementsBuilder
+                .create()
+                .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
+                        "fields-to-send"), PropertyScope.NONE)
+                .build())
+        .build();
+  }
+
+  @Override
+  public void onInvocation(SinkParams parameters, EventSinkRuntimeContext runtimeContext) throws SpRuntimeException {
+
+  }
+
+  @Override
+  public void onEvent(Event event) throws SpRuntimeException {
+
+  }
+
+  @Override
+  public void onDetach() throws SpRuntimeException {
+
+  }
+}
+
+
+```
+
+In this class, we need to implement three methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `onInvocation` is called once a pipeline using this sink is started. The third method, `onEvent`, is called for every incoming event.
+
+The ``declareModel`` method describes the properties of our data sink:
+* ``category`` defines a category for this sink.
+* ``withAssets`` denotes that we will provide an external documentation file and an icon, which can be found in the ``resources`` folder.
+* ``withLocales`` defines that we will provide an external language file, also available in the ``resources`` folder.
+* ``requiredStream`` defines requirements any input stream connected to this sink must provide. In this case, we do not have any specific requirements; we just forward all incoming events to the REST sink. However, we want to show users a list of available fields from the connected input event, from which they can select a subset. This is achieved by defining a mapping based on the empty requirement, which will later render a selection dialog in the pipeline editor.
+
+The ``onInvocation`` method is called when a pipeline containing the sink is started. Once a pipeline is started, we would like to extract the user-defined parameters.
+In this example, we simply extract the fields selected by users that should be forwarded to the REST sink and store them in a class variable for later use in the ``onEvent`` method.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our REST sink, the _onInvocation_ method in our class is called. The class `SinkParams` provides access to the configuration parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
+
+
+## Adding an implementation
+
+Now we'll add a proper implementation, i.e., the REST call executed for every incoming event.
+
+Our final class should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import com.google.common.base.Charsets;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.entity.StringEntity;
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.dataformat.SpDataFormatDefinition;
+import org.apache.streampipes.dataformat.json.JsonDataFormatDefinition;
+import org.apache.streampipes.model.DataSinkType;
+import org.apache.streampipes.model.graph.DataSinkDescription;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.DataSinkBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.wrapper.context.EventSinkRuntimeContext;
+import org.apache.streampipes.wrapper.standalone.SinkParams;
+import org.apache.streampipes.wrapper.standalone.StreamPipesDataSink;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+public class RestSink extends StreamPipesDataSink {
+
+  private static final Logger LOG = LoggerFactory.getLogger(RestSink.class);
+
+  private static final String REST_ENDPOINT_URI = YOUR_REQUEST_BIN_URL;
+  private List<String> fieldsToSend;
+  private SpDataFormatDefinition dataFormatDefinition;
+
+  @Override
+  public DataSinkDescription declareModel() {
+    ...
+  }
+
+  @Override
+  public void onInvocation(SinkParams parameters, EventSinkRuntimeContext runtimeContext) throws SpRuntimeException {
+    this.dataFormatDefinition = new JsonDataFormatDefinition();
+    this.fieldsToSend = parameters.extractor().mappingPropertyValues("fields-to-send");
+
+  }
+
+  @Override
+  public void onEvent(Event event) throws SpRuntimeException {
+    Map<String, Object> outEventMap = event.getSubset(fieldsToSend).getRaw();
+    try {
+      String json = new String(dataFormatDefinition.fromMap(outEventMap));
+      Request.Post(REST_ENDPOINT_URI).body(new StringEntity(json, Charsets.UTF_8)).execute();
+    } catch (SpRuntimeException e) {
+      LOG.error("Could not parse incoming event");
+    } catch (IOException e) {
+      LOG.error("Could not reach endpoint at {}", REST_ENDPOINT_URI);
+    }
+  }
+
+  @Override
+  public void onDetach() throws SpRuntimeException {
+
+  }
+}
+
+```
+The only class variable you need to change right now is REST_ENDPOINT_URI. Change this value to the URL provided by your request bin.
+In the ``onEvent`` method, we use a helper method to get a subset of the incoming event.
+Finally, we convert the resulting ``Map`` to a JSON string and call the endpoint.
+
+
+## Preparing the service
+The final step is to register the sink as a pipeline element.
+
+Go to the class `Init` and register the sink:
+```java
+.registerPipelineElement(new RestSink())
+```
+
+## Starting the service
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Once you start the service, it will register in StreamPipes with its hostname. The hostname will be auto-discovered and should work out-of-the-box.
+In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an SP_HOST environment variable to override the auto-discovery.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element services as defined in the `create` method is port 8090.
+       If you'd like to run multiple services at the same time on your development machine, change the port here. As an alternative, you can also provide an env variable `SP_PORT` which overrides the port settings. This is useful for using different configurations for dev and prod environments.
+</p>
+</div>
+
+Now we are ready to start our service!
+
+Execute the main method in the `Init` class of your project, open a web browser and navigate to http://localhost:8090 (or the port you have assigned).
+
+The service automatically registers itself in StreamPipes.
+
+To install the created element, open the StreamPipes UI and follow the instructions provided in the [user guide](03_use-install-pipeline-elements.md).
+
+## Read more
+
+Congratulations! You've just created your first data sink for StreamPipes.
+There are many more things to explore and data sinks can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](../dev-guide-sdk-guide-sinks) to see what's possible!
diff --git a/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sources.md b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sources.md
new file mode 100644
index 0000000..21a16c3
--- /dev/null
+++ b/documentation/website/versioned_docs/version-0.69.0/06_extend-tutorial-data-sources.md
@@ -0,0 +1,212 @@
+---
+id: version-0.69.0-extend-tutorial-data-sources
+title: Tutorial: Data Sources
+sidebar_label: Tutorial: Data Sources
+original_id: extend-tutorial-data-sources
+---
+
+In this tutorial, we will add a new data source consisting of a single data stream. The source will be provided as a standalone component (i.e., the description will be accessible through an integrated web server).
+
+## Objective
+
+We are going to create a new data stream that is produced by a GPS sensor installed in a delivery vehicle.
+The sensor produces a continuous stream of events that contain the current timestamp, the current lat/lng position of the vehicle and the plate number of the vehicle.
+Events are published in a JSON format as follows:
+```json
+{
+  "timestamp" : 145838399,
+  "latitude" : 37.04,
+  "longitude" : 17.04,
+  "plateNumber" : "KA-AB 123"
+}
+```
+
+These events are published to a Kafka broker using the topic `org.streampipes.tutorial.vehicle`.
+
+In the following section, we show how to describe this stream in a form that allows you to import and use it in StreamPipes.
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate \
+-DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+-DarchetypeVersion=0.69.0 -DgroupId=my.groupId \
+-DartifactId=my-source -DclassNamePrefix=MySource -DpackageName=mypackagename
+```
+
+You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
+</p>
+</div>
+
+## Adding a data stream description
+
+Now we will add a new data stream definition.
+First, create a new class `MyVehicleStream` which should look as follows:
+
+```java
+
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.model.SpDataStream;
+import org.apache.streampipes.sources.AbstractAdapterIncludedStream;
+
+public class MyVehicleStream extends AbstractAdapterIncludedStream {
+
+  @Override
+  public SpDataStream declareModel() {
+    return null;
+  }
+
+  @Override
+  public void executeStream() {
+
+  }
+}
+```
+
+This class extends the class ``AbstractAdapterIncludedStream``, which indicates that this source continuously produces data (configured in the ``executeStream()`` method).
+In contrast, the class `AbstractAlreadyExistingStream` indicates that we only want to describe an already existing stream (e.g., a stream that already sends data to an existing Kafka broker).
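+
+If you only need to describe such an already existing stream, a minimal sketch could look like the following (assuming, as suggested by the description above, that `AbstractAlreadyExistingStream` does not require an `executeStream()` implementation):
+
+```java
+public class MyExistingVehicleStream extends AbstractAlreadyExistingStream {
+
+  @Override
+  public SpDataStream declareModel() {
+    // Only the stream description is provided here; events are already produced
+    // by an external system. Replace null with a DataStreamBuilder-based
+    // description as shown below.
+    return null;
+  }
+}
+```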
+
+Next, we will add the definition of the data stream. Add the following code inside of the `declareModel` method:
+```java
+return DataStreamBuilder.create("org.streampipes.tutorial.vehicle.position", "Vehicle Position", "An event stream " +
+          "that produces current vehicle positions")
+```
+
+This line creates a new instance of the SDK's `DataStreamBuilder` by providing three basic parameters:
+The first parameter must be a unique identifier of your data stream.
+The second and third parameters indicate a label and a description of your stream.
+These values will later be used in the StreamPipes UI to display stream details in a human-readable manner.
+
+Next, we will add the properties as stated above to the stream definition by adding the following lines:
+```java
+.property(EpProperties.timestampProperty("timestamp"))
+.property(EpProperties.stringEp(Labels.from("plate-number", "Plate Number", "Denotes the plate number of the vehicle"), "plateNumber", "http://my.company/plateNumber"))
+.property(EpProperties.doubleEp(Labels.from("latitude", "Latitude", "Denotes the latitude value of the vehicle's position"), "latitude", Geo.lat))
+.property(EpProperties.doubleEp(Labels.from("longitude", "Longitude", "Denotes the longitude value of the vehicle's position"), "longitude", Geo.lng))
+```
+These four _event properties_ compose our _event schema_. An event property must, at least, provide the following attributes:
+
+* **Runtime Name**. The runtime name indicates the key of the property at runtime, e.g., if our JSON message contains a structure such as `{"plateNumber" : "KA-F 123"}`, the runtime name must be `plateNumber`.
+* **Runtime Type**. An event property must have a primitive type (we will later see how to model more complex properties such as lists and nested properties).
+  The type must be an instance of `XMLSchema` primitives; however, the SDK provides convenience methods to set the property type.
+* **Domain Property**. The domain property indicates the semantics of the event property. For instance, the `latitude` property is linked to the `http://www.w3.org/2003/01/geo/wgs84_pos#lat` property of the WGS84 vocabulary.
+  The domain property should be a URI that is part of an existing or domain-specific vocabulary. The SDK provides convenience methods for popular vocabularies (e.g., Schema.org, Dolce or WGS84).
+
+In order to complete the minimum required specification of an event stream, we need to provide information on the transport format and protocol of the data stream at runtime.
+
+This can be achieved by extending the builder with the respective properties:
+```java
+.format(Formats.jsonFormat())
+.protocol(Protocols.kafka("localhost", 9094, "TOPIC_SHOULD_BE_CHANGED"))
+.build();
+```
+
+Set ``org.streampipes.tutorial.vehicle`` as your new topic by replacing the term ``TOPIC_SHOULD_BE_CHANGED``.
+
+In this example, we defined that the data stream consists of events in a JSON format and that Kafka is used as a message broker to transmit events.
+The last ``build()`` method call triggers the construction of the data stream definition.
+
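+Putting the snippets above together, the complete `declareModel` method of `MyVehicleStream` looks roughly as follows (assembled from the fragments shown above):
+
+```java
+@Override
+public SpDataStream declareModel() {
+  return DataStreamBuilder.create("org.streampipes.tutorial.vehicle.position", "Vehicle Position", "An event stream " +
+          "that produces current vehicle positions")
+      .property(EpProperties.timestampProperty("timestamp"))
+      .property(EpProperties.stringEp(Labels.from("plate-number", "Plate Number", "Denotes the plate number of the vehicle"), "plateNumber", "http://my.company/plateNumber"))
+      .property(EpProperties.doubleEp(Labels.from("latitude", "Latitude", "Denotes the latitude value of the vehicle's position"), "latitude", Geo.lat))
+      .property(EpProperties.doubleEp(Labels.from("longitude", "Longitude", "Denotes the longitude value of the vehicle's position"), "longitude", Geo.lng))
+      .format(Formats.jsonFormat())
+      .protocol(Protocols.kafka("localhost", 9094, "org.streampipes.tutorial.vehicle"))
+      .build();
+}
+```
+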
+That's it! In the next section, we will add a simple data simulator; later on, we will start the service and inspect the generated pipeline element description.
+
+## Creating some dummy data
+
+Let's assume our stream should produce some random values that are sent to StreamPipes. We'll add a very simple data simulator to the ``executeStream`` method as follows:
+
+```java
+@Override
+  public void executeStream() {
+
+    // Simple simulator: publishes a random vehicle position to Kafka once per second
+    SpKafkaProducer producer = new SpKafkaProducer("localhost:9094", "my-topic", Collections.emptyList());
+    Random random = new Random();
+    Runnable runnable = () -> {
+      for (;;) {
+        JsonObject jsonObject = new JsonObject();
+        jsonObject.addProperty("timestamp", System.currentTimeMillis());
+        jsonObject.addProperty("plateNumber", "KA-FZ 1");
+        jsonObject.addProperty("latitude", random.nextDouble());
+        jsonObject.addProperty("longitude", random.nextDouble());
+
+        producer.publish(jsonObject.toString());
+
+        try {
+          TimeUnit.SECONDS.sleep(1);
+        } catch (InterruptedException e) {
+          e.printStackTrace();
+        }
+      }
+    };
+
+    new Thread(runnable).start();
+  }
+```
+
+Change the topic and the URL of your Kafka broker so that they match the values defined in the stream description above. Depending on your IDE, you may need to add the missing imports for the classes used here (e.g., Gson's ``JsonObject``, ``SpKafkaProducer``, ``Random``, ``TimeUnit`` and ``Collections``).
+
+## Registering the data stream
+
+You need to register the stream in the service definition. Open the ``Init`` class and register the ``MyVehicleStream``:
+
+```java
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes",
+                    "human-readable service name",
+                    "human-readable service description", 8090)
+            .registerPipelineElement(new ExampleDataProcessor())
+            .registerPipelineElement(new ExampleDataSink())
+            .registerPipelineElement(new MyVehicleStream())
+            .registerMessagingFormats(
+                    new JsonDataFormatFactory(),
+                    new CborDataFormatFactory(),
+                    new SmileDataFormatFactory(),
+                    new FstDataFormatFactory())
+            .registerMessagingProtocols(
+                    new SpKafkaProtocolFactory(),
+                    new SpJmsProtocolFactory(),
+                    new SpMqttProtocolFactory())
+            .build();
+  }
+
+```
+
+You can remove the other two example classes if you want.
+
+## Starting the service
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Once you start the service, it will register in StreamPipes with its hostname. The hostname will be auto-discovered and should work out-of-the-box.
+In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an SP_HOST environment variable to override the auto-discovery.
+</p>
+</div>
+
+Now we are ready to start our first container!
+
+Execute the main method in the class `Init`, open a web browser and navigate to http://localhost:8090, or change the port according to the value of the ``SP_PORT`` variable in the env file.
+
+You should see something as follows:
+
+<img src="/docs/img/tutorial-sources/pe-overview.PNG" alt="Pipeline Element Container Overview">
+
+Click on the link of the data source to see the generated description of the pipeline element.
+
+<img src="/docs/img/tutorial-sources/pe-rdf.PNG" alt="Pipeline Element description">
+
+The container automatically registers itself in StreamPipes.
+
+To install the newly created element, open the StreamPipes UI and install the source via the ``Install Pipeline Elements`` section.
+
+## Read more
+
+Congratulations! You've just created your first pipeline element for StreamPipes.
+There are many more things to explore and data sources can be defined in much more detail.
diff --git a/documentation/website/versioned_sidebars/version-0.69.0-sidebars.json b/documentation/website/versioned_sidebars/version-0.69.0-sidebars.json
new file mode 100644
index 0000000..a4ff54c
--- /dev/null
+++ b/documentation/website/versioned_sidebars/version-0.69.0-sidebars.json
@@ -0,0 +1,240 @@
+{
+  "version-0.69.0-documentation": {
+    "🚀 Try StreamPipes": [
+      "version-0.69.0-user-guide-introduction",
+      "version-0.69.0-try-installation",
+      "version-0.69.0-try-tutorial"
+    ],
+    "💡 Concepts": [
+      "version-0.69.0-concepts-overview"
+    ],
+    "🎓 Use StreamPipes": [
+      "version-0.69.0-use-connect",
+      "version-0.69.0-use-pipeline-editor",
+      "version-0.69.0-use-managing-pipelines",
+      "version-0.69.0-use-dashboard",
+      "version-0.69.0-use-data-explorer",
+      "version-0.69.0-use-notifications",
+      "version-0.69.0-use-install-pipeline-elements",
+      "version-0.69.0-use-configurations"
+    ],
+    "📚 Pipeline Elements": [
+      {
+        "type": "subcategory",
+        "label": "Adapters",
+        "ids": [
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.kafka",
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.pulsar",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.coindesk",
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.file",
+          "version-0.69.0-pe/org.apache.streampipes.protocol.set.file",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.flic.mqtt",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.gdelt",
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.httpserver",
+          "version-0.69.0-pe/org.apache.streampipes.protocol.set.http",
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.http",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.iex.news",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.iex.stocks",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.iss",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.image.set",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.image.stream",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.influxdb.set",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.influxdb.stream",
+          "version-0.69.0-pe/org.apache.streampipes.connect.protocol.stream.mqtt",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.simulator.machine",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.mysql.set",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.mysql.stream",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.netio.mqtt",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.netio.rest",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.opcua",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.plc4x.modbus",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.plc4x.s7",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.ros",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.simulator.randomdataset",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.slack",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.ti",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.wikipedia.edit",
+          "version-0.69.0-pe/org.apache.streampipes.connect.adapters.wikipedia.new"
+        ]
+      },
+      {
+        "type": "subcategory",
+        "label": "Data Processors",
+        "ids": [
+          "version-0.69.0-pe/org.apache.streampipes.processors.pattern-detection.flink.absence",
+          "version-0.69.0-pe/org.apache.streampipes.processors.aggregation.flink.aggregation",
+          "version-0.69.0-pe/org.apache.streampipes.processors.pattern-detection.flink.and",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.duration-value",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.chunker",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.compose",
+          "version-0.69.0-pe/org.apache.streampipes.processors.aggregation.flink.count",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.count-array",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo",
+          "version-0.69.0-pe/org.apache.streampipes.processors.changedetection.jvm.cusum",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG",
+          "version-0.69.0-pe/org.apache.streampipes.processors.aggregation.flink.eventcount",
+          "version-0.69.0-pe/org.apache.streampipes.processors.aggregation.flink.rate",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.field-converter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.fieldhasher",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.field-mapper",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.field-renamer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.frequency",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.frequencychange",
+          "version-0.69.0-pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification",
+          "version-0.69.0-pe/org.apache.streampipes.processor.geo.jvm.geocoding",
+          "version-0.69.0-pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper",
+          "version-0.69.0-pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.flink.languagedetection",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.languagedetection",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.enrich",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.namefinder",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.numericalfilter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.numericalfilter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.partofspeech",
+          "version-0.69.0-pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.project",
+          "version-0.69.0-pe/org.apache.streampipes.processor.imageclassification.qrcode",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.limit",
+          "version-0.69.0-pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection",
+          "version-0.69.0-pe/org.apache.streampipes.processors.pattern-detection.flink.sequence",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.sequence",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure",
+          "version-0.69.0-pe/org.apache.streampipes.processor.geo.flink",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.processor.speed",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.split-array",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator",
+          "version-0.69.0-pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop",
+          "version-0.69.0-pe/org.apache.streampipes.processors.statistics.flink.statistics-summary",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.stop",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.merge",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.taskduration",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.textfilter",
+          "version-0.69.0-pe/org.apache.streampipes.processors.filters.jvm.threshold",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.flink.timestamp",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.jvm.tokenizer",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean",
+          "version-0.69.0-pe/org.apache.streampipes.processors.siddhi.increase",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry",
+          "version-0.69.0-pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing",
+          "version-0.69.0-pe/org.apache.streampipes.processors.transformation.jvm.changed-value",
+          "version-0.69.0-pe/org.apache.streampipes.processors.textmining.flink.wordcount"
+        ]
+      },
+      {
+        "type": "subcategory",
+        "label": "Data Sinks",
+        "ids": [
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.couchdb",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.internal.jvm.dashboard",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.internal.jvm.datalake",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.ditto",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.flink.elasticsearch",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.notifications.jvm.email",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.influxdb",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.iotdb",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.jms",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.kafka",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.mysql",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.nats",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.internal.jvm.notification",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.opcua",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.notifications.jvm.onesignal",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.databases.jvm.postgresql",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.pulsar",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.rest",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.notifications.jvm.slack",
+          "version-0.69.0-pe/org.apache.streampipes.sinks.notifications.jvm.telegram"
+        ]
+      }
+    ],
+    "⚡ Deploy StreamPipes": [
+      "version-0.69.0-deploy-docker",
+      "version-0.69.0-deploy-kubernetes",
+      "version-0.69.0-deploy-use-ssl",
+      "version-0.69.0-deploy-security"
+    ],
+    "💻 Extend StreamPipes": [
+      "version-0.69.0-extend-setup",
+      "version-0.69.0-extend-cli",
+      "version-0.69.0-extend-archetypes",
+      "version-0.69.0-extend-first-processor",
+      "version-0.69.0-extend-tutorial-data-sources",
+      "version-0.69.0-extend-tutorial-data-processors",
+      "version-0.69.0-extend-tutorial-data-sinks",
+      "version-0.69.0-extend-sdk-event-model",
+      "version-0.69.0-extend-sdk-stream-requirements",
+      "version-0.69.0-extend-sdk-static-properties",
+      "version-0.69.0-extend-sdk-output-strategies",
+      "version-0.69.0-extend-sdk-migration-service-discovery"
+    ],
+    "🔧 Technicals": [
+      "version-0.69.0-technicals-architecture",
+      "version-0.69.0-technicals-user-guidance",
+      "version-0.69.0-technicals-runtime-wrappers",
+      "version-0.69.0-technicals-messaging",
+      "version-0.69.0-technical-configuration"
+    ],
+    "👪 Community": [
+      "version-0.69.0-community-get-help",
+      "version-0.69.0-community-contribute"
+    ]
+  },
+  "version-0.69.0-developer-guide": {
+    "Basics": [
+      "version-0.69.0-dev-guide-introduction",
+      "version-0.69.0-dev-guide-architecture",
+      "version-0.69.0-dev-guide-development-environment",
+      "version-0.69.0-dev-guide-archetype",
+      "version-0.69.0-dev-guide-migration"
+    ],
+    "Tutorials": [
+      "version-0.69.0-dev-guide-tutorial-sources",
+      "version-0.69.0-dev-guide-tutorial-processors",
+      "version-0.69.0-dev-guide-tutorial-sinks"
+    ],
+    "SDK Reference": [
+      "version-0.69.0-dev-guide-sdk-guide-sources",
+      "version-0.69.0-dev-guide-sdk-guide-processors",
+      "version-0.69.0-dev-guide-sdk-guide-sinks",
+      "version-0.69.0-dev-guide-stream-requirements",
+      "version-0.69.0-dev-guide-static-properties",
+      "version-0.69.0-dev-guide-output-strategies",
+      "version-0.69.0-dev-guide-event-model"
+    ],
+    "Configuration": [
+      "version-0.69.0-dev-guide-ssl",
+      "version-0.69.0-dev-guide-configuration"
+    ]
+  },
+  "version-0.69.0-faq": {
+    "FAQ": [
+      "version-0.69.0-faq-common-problems"
+    ]
+  }
+}
diff --git a/documentation/website/versions.json b/documentation/website/versions.json
index 9d7c8ed..a17b01a 100644
--- a/documentation/website/versions.json
+++ b/documentation/website/versions.json
@@ -1,4 +1,5 @@
 [
+  "0.69.0",
   "0.68.0",
   "0.67.0",
   "0.66.0",