Posted to commits@airflow.apache.org by ka...@apache.org on 2021/01/21 20:19:44 UTC

[airflow] 19/44: Changes release image preparation to use PyPI packages (#12990)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a7a67290648a6f86d41b8b6fd0dda645769c608b
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sat Dec 12 12:01:58 2020 +0100

    Changes release image preparation to use PyPI packages (#12990)
    
    * Changes release image preparation to use PyPI packages
    
    Since we released all the provider packages to PyPI now in
    RC version, we can now change the mechanism to prepare the
    production image to use released packages in case of tagged builds.
    
    The "branch" production images are still prepared using the
    CI images and .whl packages built from sources, but the
    release packages are built from officially released PyPI
    packages.
    
    Also some corrections and updates were made to the release process:
    
    * the constraint tags when an RC candidate is sent should contain
      the rcN suffix.
    
    * there was a missing step about pushing the release tag once the
      release is out
    
    * pushing the tag to GitHub should be done after the PyPI packages
      are uploaded, so that automated image building in DockerHub
      can use those packages (see the sketch after this list)
    
    * added a note that in case we release some provider
      packages that depend on the just-released airflow version,
      they should be released after airflow is in PyPI but before
      the tag is pushed to GitHub (also to allow the image to be
      built automatically from the released packages)
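    
    A sketch of the upload-then-tag ordering described above (hypothetical
    1.10.14 release; both commands appear in the release guide below):
    
    ```shell script
    twine upload -r pypi dist/*   # 1. publish the packages to PyPI first
    git push origin 1.10.14       # 2. then push the release tag to GitHub so
                                  #    the DockerHub image build can use them
    ```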
    
    Fixes: #12970
    
    * Update dev/README_RELEASE_AIRFLOW.md
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    
    * Update dev/README_RELEASE_AIRFLOW.md
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    (cherry picked from commit db027735a7ee03d2678c8ca8712dfd75de148261)
---
 BREEZE.rst                                  |    4 +-
 Dockerfile                                  |   62 +-
 Dockerfile.ci                               |    1 +
 IMAGES.rst                                  |    4 +-
 breeze-complete                             |    3 +-
 dev/README.md                               | 1511 +--------------------------
 dev/README_RELEASE_AIRFLOW.md               |  733 +++++++++++++
 docs/production-deployment.rst              |  232 ++--
 scripts/ci/images/ci_build_dockerhub.sh     |   95 +-
 scripts/ci/libraries/_build_images.sh       |   68 +-
 scripts/ci/libraries/_initialization.sh     |   60 +-
 scripts/ci/libraries/_parameters.sh         |    1 -
 scripts/in_container/_in_container_utils.sh |  278 ++++-
 13 files changed, 1276 insertions(+), 1776 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index 1ed3cfe..633fb4d 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1254,7 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
@@ -2209,7 +2209,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
diff --git a/Dockerfile b/Dockerfile
index 23a9915..eecc683 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -167,15 +167,17 @@ ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}
 ENV PATH=${PATH}:/root/.local/bin
 RUN mkdir -p /root/.local/bin
 
-ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
-ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
-
 RUN if [[ -f /docker-context-files/.pypirc ]]; then \
         cp /docker-context-files/.pypirc /root/.pypirc; \
     fi
 
 RUN pip install --upgrade "pip==${PIP_VERSION}"
 
+# By default we do not use pre-cached packages, but in the CI/Breeze environment we override this to speed up
+# builds when setup.py/setup.cfg change. This is a pure optimisation of CI/Breeze builds.
+ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
+
 # In case of Production build image segment we want to pre-install master version of airflow
 # dependencies from GitHub so that we do not have to always reinstall it from the scratch.
 RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \
@@ -188,10 +190,13 @@ RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \
           && pip uninstall --yes apache-airflow; \
     fi
 
-ARG AIRFLOW_SOURCES_FROM="."
+# By default we install the latest airflow from PyPI, so we do not need to copy the sources of Airflow,
+# but in case of Breeze/CI builds we use the latest sources and override
+# SOURCES_FROM/TO with "." and "/opt/airflow" respectively.
+ARG AIRFLOW_SOURCES_FROM="empty"
 ENV AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM}
 
-ARG AIRFLOW_SOURCES_TO="/opt/airflow"
+ARG AIRFLOW_SOURCES_TO="/empty"
 ENV AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO}
 
 COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
@@ -199,28 +204,41 @@ COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
 ARG CASS_DRIVER_BUILD_CONCURRENCY
 ENV CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY}
 
+# This is the airflow version that is put in the label of the image build
 ARG AIRFLOW_VERSION
 ENV AIRFLOW_VERSION=${AIRFLOW_VERSION}
 
 ARG ADDITIONAL_PYTHON_DEPS=""
 ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
 
-ARG AIRFLOW_INSTALL_SOURCES="."
-ENV AIRFLOW_INSTALL_SOURCES=${AIRFLOW_INSTALL_SOURCES}
+# Determines the way airflow is installed. By default we install airflow from the PyPI `apache-airflow` package,
+# but it can also be `.` for a local installation or a GitHub URL pointing to a specific branch or tag
+# of Airflow. Note that for a local source installation you need to have the local sources of
+# Airflow checked out together with the Dockerfile, and AIRFLOW_SOURCES_FROM and AIRFLOW_SOURCES_TO
+# set to "." and "/opt/airflow" respectively.
+ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
+ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
 
+# By default the latest released version of airflow is installed (when empty), but this value can be overridden
+# and we can install a specific version of airflow this way.
 ARG AIRFLOW_INSTALL_VERSION=""
 ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION}
 
-ARG AIRFLOW_LOCAL_PIP_WHEELS=""
-ENV AIRFLOW_LOCAL_PIP_WHEELS=${AIRFLOW_LOCAL_PIP_WHEELS}
-
-ARG INSTALL_AIRFLOW_VIA_PIP="true"
-ENV INSTALL_AIRFLOW_VIA_PIP=${INSTALL_AIRFLOW_VIA_PIP}
-
-ARG SLUGIFY_USES_TEXT_UNIDECODE=""
-ENV SLUGIFY_USES_TEXT_UNIDECODE=${SLUGIFY_USES_TEXT_UNIDECODE}
-
-ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
+# We can set this value to true in case we want to install .whl or .tar.gz packages placed in the
+# docker-context-files folder. This can be done both for additional packages you want to install
+# and for airflow itself (you have to set INSTALL_FROM_PYPI to false in this case).
+ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
+ENV INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES}
+
+# By default we install latest airflow from PyPI. You can set it to false if you want to install
+# Airflow from the .whl or .tar.gz packages placed in `docker-context-files` folder.
+ARG INSTALL_FROM_PYPI="true"
+ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI}
+
+# By default we install providers from PyPI, but in case of Breeze builds we want to install providers
+# from local sources without the need to prepare provider packages upfront. This value is
+# automatically overridden by the Breeze scripts.
+ARG INSTALL_PROVIDERS_FROM_SOURCES="false"
 ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES}
 
 WORKDIR /opt/airflow
@@ -229,16 +247,16 @@ WORKDIR /opt/airflow
 RUN if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then \
         AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}; \
     fi; \
-    if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
-        pip install --user "${AIRFLOW_INSTALL_SOURCES}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \
+    if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
+        pip install --user "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \
             --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
     if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
         pip install --user ${ADDITIONAL_PYTHON_DEPS} --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
-    if [[ ${AIRFLOW_LOCAL_PIP_WHEELS} == "true" ]]; then \
+    if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
         if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
-            pip install --user --no-deps /docker-context-files/*.whl; \
+            pip install --user --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi; \
     find /root/.local/ -name '*.pyc' -print0 | xargs -0 rm -r || true ; \
@@ -273,6 +291,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow-build-image" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.buildImage.buildId=${BUILD_ID} \
   org.apache.airflow.buildImage.commitSha=${COMMIT_SHA}
 
@@ -434,6 +453,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.uid="${AIRFLOW_UID}" \
   org.apache.airflow.gid="${AIRFLOW_GID}" \
   org.apache.airflow.mainImage.buildId=${BUILD_ID} \
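
Taken together, the renamed build arguments mean the production image is now
built straight from PyPI by default. A sketch of such a build under the new
defaults (not a command from this commit; the values mirror the IMAGES.rst
examples further down):

```shell script
docker build . \
  --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
  --build-arg AIRFLOW_VERSION="1.10.14" \
  --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
  --build-arg INSTALL_FROM_PYPI="true" \
  --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
```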
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 32b9383..2210989 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -394,6 +394,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow-ci" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.uid="0" \
   org.apache.airflow.gid="0" \
   org.apache.airflow.buildId=${BUILD_ID} \
diff --git a/IMAGES.rst b/IMAGES.rst
index 94ad6fd..8804d1f 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -310,7 +310,7 @@ additional apt dev and runtime dependencies.
   docker build . -f Dockerfile.ci \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -345,7 +345,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
   docker build . -f Dockerfile.ci \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
diff --git a/breeze-complete b/breeze-complete
index 5502eec..819938b 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -52,13 +52,12 @@ _breeze_allowed_package_formats="wheel sdist both"
 
 _breeze_allowed_install_airflow_versions=$(cat <<-EOF
 1.10.14
-1.10.13
 1.10.12
 1.10.11
 1.10.10
 1.10.9
-wheel
 none
+wheel
 EOF
 )
 
diff --git a/dev/README.md b/dev/README.md
index b5ad9a3..805e976 100644
--- a/dev/README.md
+++ b/dev/README.md
@@ -15,6 +15,25 @@
  KIND, either express or implied.  See the License for the
  specific language governing permissions and limitations
  under the License.
+ -->
+
+README.md<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
 -->
 <!-- START doctoc generated TOC please keep comment here to allow auto update -->
 <!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
@@ -22,21 +41,10 @@
 
 - [Apache Airflow source releases](#apache-airflow-source-releases)
   - [Apache Airflow Package](#apache-airflow-package)
-  - [Backport Provider packages](#backport-provider-packages)
 - [Prerequisites for the release manager preparing the release](#prerequisites-for-the-release-manager-preparing-the-release)
   - [Upload Public keys to id.apache.org](#upload-public-keys-to-idapacheorg)
   - [Configure PyPI uploads](#configure-pypi-uploads)
   - [Hardware used to prepare and verify the packages](#hardware-used-to-prepare-and-verify-the-packages)
-- [Apache Airflow packages](#apache-airflow-packages)
-  - [Prepare the Apache Airflow Package RC](#prepare-the-apache-airflow-package-rc)
-  - [Vote and verify the Apache Airflow release candidate](#vote-and-verify-the-apache-airflow-release-candidate)
-  - [Publish the final Apache Airflow release](#publish-the-final-apache-airflow-release)
-- [Provider Packages](#provider-packages)
-  - [Decide when to release](#decide-when-to-release)
-  - [Prepare the Backport Provider Packages RC](#prepare-the-backport-provider-packages-rc)
-  - [Vote and verify the Backport Providers release candidate](#vote-and-verify-the-backport-providers-release-candidate)
-  - [Publish the final releases of backport packages](#publish-the-final-releases-of-backport-packages)
-  - [Prepare the Regular Provider Packages Alpha](#prepare-the-regular-provider-packages-alpha)
 
 <!-- END doctoc generated TOC please keep comment here to allow auto update -->
 
@@ -52,11 +60,11 @@ The Apache Airflow releases are one of the two types:
 This package contains sources that allow the user building fully-functional Apache Airflow 2.0 package.
 They contain sources for:
 
- * "apache-airflow" python package that installs "airflow" Python package and includes
-   all the assets required to release the webserver UI coming with Apache Airflow
- * Dockerfile and corresponding scripts that build and use an official DockerImage
- * Breeze development environment that helps with building images and testing locally
-   apache airflow built from sources
+* "apache-airflow" python package that installs "airflow" Python package and includes
+  all the assets required to release the webserver UI coming with Apache Airflow
+* Dockerfile and corresponding scripts that build and use an official DockerImage
+* Breeze development environment that helps with building images and testing locally
+  apache airflow built from sources
 
 In the future (Airflow 2.0) this package will be split into separate "core" and "providers" packages that
 will be distributed separately, following the mechanisms introduced in Backport Package Providers. We also
@@ -76,38 +84,8 @@ cannot or do not want to build the packages themselves can use them as a conveni
 Apache Airflow, however they are not considered as "official source releases". You can read more
 details about it in the [ASF Release Policy](http://www.apache.org/legal/release-policy.html).
 
-This document describes the process of releasing both - official source packages and convenience
-packages for Apache Airflow packages.
-
-## Backport Provider packages
-
-The Backport Provider packages are packages (per provider) that make it possible to easily use Hooks,
-Operators, Sensors, and Secrets from the 2.0 version of Airflow in the 1.10.* series.
-
-Once you release the packages, you can simply install them with:
-
-```
-pip install apache-airflow-backport-providers-<PROVIDER>[<EXTRAS>]
-```
-
-Where `<PROVIDER>` is the provider id and `<EXTRAS>` are optional extra packages to install.
-You can find the provider packages dependencies and extras in the README.md files in each provider
-package (in `airflow/providers/<PROVIDER>` folder) as well as in the PyPI installation page.
-
-Backport providers are a great way to migrate your DAGs to Airflow-2.0 compatible DAGs. You can
-switch to the new Airflow-2.0 packages in your DAGs, long before you attempt to migrate
-airflow to the 2.0 line.
-
-The sources released in SVN allow the user to build all the provider packages, following the
-instructions and scripts provided. Those are also "official source releases" as described in the
-[ASF Release Policy](http://www.apache.org/legal/release-policy.html) and they are available
-via [Official Apache Download sources](https://downloads.apache.org/airflow/backport-providers/).
-
-There are also 50+ convenience packages released as "apache-airflow-backport-providers" separately in
-PyPI. You can find them all by [PyPI query](https://pypi.org/search/?q=apache-airflow-backport-providers)
-
-The document describes the process of releasing both - official source packages and convenience
-packages for Backport Provider Packages.
+Detailed instructions for releasing Provider Packages can be found in the
+[README_RELEASE_AIRFLOW.md](README_RELEASE_AIRFLOW.md)
 
 # Prerequisites for the release manager preparing the release
 
@@ -177,7 +155,7 @@ password=<API Upload Token>
 Set proper permissions for the pypirc file:
 
 ```shell script
-$ chmod 600 ~/.pypirc
+chmod 600 ~/.pypirc
 ```
 
 - Install [twine](https://pypi.org/project/twine/) if you do not have it already (it can be done
@@ -190,9 +168,7 @@ pip install twine
 (more details [here](https://peterdowns.com/posts/first-time-with-pypi.html).)
 
 - Set proper permissions for the pypirc file:
-`$ chmod 600 ~/.pypirc`
-
-- Confirm that `airflow/version.py` is set properly.
+  `$ chmod 600 ~/.pypirc`
 
 
 ## Hardware used to prepare and verify the packages
@@ -202,1434 +178,3 @@ by the committer acting as release manager. While strictly speaking, releases mu
 on hardware owned and controlled by the committer, for practical reasons it's best if the packages are
 prepared using such hardware. More information can be found in this
 [FAQ](http://www.apache.org/legal/release-policy.html#owned-controlled-hardware)
-
-# Apache Airflow packages
-
-## Prepare the Apache Airflow Package RC
-
-### Build RC artifacts (both source packages and convenience packages)
-
-The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any modification other than renaming – i.e. the contents of the files must be the same between the voted release candidate and the final release. Because of this, the version in the built artifacts that will become the official Apache releases must not include the rcN suffix.
-
-- Set environment variables
-
-    ```shell script
-    # Set Version
-    export VERSION=1.10.2rc3
-
-
-    # Set AIRFLOW_REPO_ROOT to the path of your git repo
-    export AIRFLOW_REPO_ROOT=$(pwd)
-
-
-    # Example after cloning
-    git clone https://github.com/apache/airflow.git airflow
-    cd airflow
-    export AIRFLOW_REPO_ROOT=$(pwd)
-    ```
-
-- Set your version to 1.10.2 in `airflow/version.py` (without the RC tag)
-- Commit the version change.
-
-- Tag your release
-
-    ```shell script
-    git tag -s ${VERSION}
-    ```
-
-- Clean the checkout: the sdist step below would otherwise include any untracked or changed files
-
-    ```shell script
-    git clean -fxd
-    ```
-
-- Tarball the repo
-
-    ```shell script
-    git archive --format=tar.gz ${VERSION} --prefix=apache-airflow-${VERSION}/ -o apache-airflow-${VERSION}-source.tar.gz
-    ```
-
-
-- Generate sdist
-
-    NOTE: Make sure your checkout is clean at this stage - any untracked or changed files will otherwise be included
-     in the file produced.
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Rename the sdist
-
-    ```shell script
-    mv dist/apache-airflow-${VERSION%rc?}.tar.gz apache-airflow-${VERSION}-bin.tar.gz
-    mv dist/apache_airflow-${VERSION%rc?}-py2.py3-none-any.whl apache_airflow-${VERSION}-py2.py3-none-any.whl
-    ```
-
-- Generate SHA512/ASC (If you have not generated a key yet, generate it by following instructions on http://www.apache.org/dev/openpgp.html#key-gen-generate-key)
-
-    ```shell script
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-source.tar.gz
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-bin.tar.gz
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache_airflow-${VERSION}-py2.py3-none-any.whl
-    ```
-
-- Push Tags
-
-    ```shell script
-    git push origin ${VERSION}
-    ```
-
-- Push the artifacts to ASF dev dist repo
-```
-# First clone the repo
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# Create new folder for the release
-cd airflow-dev
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder & commit
-mv ${AIRFLOW_REPO_ROOT}/apache{-,_}airflow-${VERSION}* ${VERSION}/
-cd ${VERSION}
-svn add *
-svn commit -m "Add artifacts for Airflow ${VERSION}"
-```
-
-### Prepare PyPI convenience "snapshot" packages
-
-At this point we have the artefact that we vote on, but as a convenience to developers we also want to
-publish "snapshots" of the RC builds to PyPI for installing via pip. To do this we need to:
-
-- Edit the `airflow/version.py` to include the RC suffix.
-
-- Build the package:
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Verify the artifacts that would be uploaded:
-
-    ```shell script
-    twine check dist/*
-    ```
-
-- Upload the package to PyPi's test environment:
-
-    ```shell script
-    twine upload -r pypitest dist/*
-    ```
-
-- Verify that the test package looks good by downloading it and installing it into a virtual environment. The package download link is available at:
-https://test.pypi.org/project/apache-airflow/#files
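-
-A minimal sketch of such a check (using the example 1.10.2rc3 version from this
-guide; the extra index lets dependencies resolve from regular PyPI):
-
-    ```shell script
-    python -m venv /tmp/airflow-rc-test
-    source /tmp/airflow-rc-test/bin/activate
-    pip install --index-url https://test.pypi.org/simple/ \
-      --extra-index-url https://pypi.org/simple/ apache-airflow==1.10.2rc3
-    ```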
-
-- Upload the package to PyPi's production environment:
-`twine upload -r pypi dist/*`
-
-- Again, confirm that the package is available here:
-https://pypi.python.org/pypi/apache-airflow
-
-- Throw away the change - we don't want to commit this: `git checkout airflow/version.py`
-
-It is important to stress that this snapshot should not be named "release", and it
-is not supposed to be used by or advertised to the end-users who do not read the devlist.
-
-## Vote and verify the Apache Airflow release candidate
-
-### Prepare Vote email on the Apache Airflow release candidate
-
-- Use the dev/airflow-jira script to generate a list of Airflow JIRAs that were closed in the release.
-
-- Send out a vote to the dev@airflow.apache.org mailing list:
-
-Subject:
-```
-[VOTE] Airflow 1.10.2rc3
-```
-
-Body:
-
-```
-Hey all,
-
-I have cut Airflow 1.10.2 RC3. This email is calling a vote on the release,
-which will last for 72 hours. Consider this my (binding) +1.
-
-Airflow 1.10.2 RC3 is available at:
-https://dist.apache.org/repos/dist/dev/airflow/1.10.2rc3/
-
-*apache-airflow-1.10.2rc3-source.tar.gz* is a source release that comes
-with INSTALL instructions.
-*apache-airflow-1.10.2rc3-bin.tar.gz* is the binary Python "sdist" release.
-
-Public keys are available at:
-https://dist.apache.org/repos/dist/release/airflow/KEYS
-
-Only votes from PMC members are binding, but the release manager should encourage members of the community
-to test the release and vote with "(non-binding)".
-
-The test procedure for PMCs and Contributors who would like to test this RC is described in
-https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate
-
-Please note that the version number excludes the `rcX` string, so it's now
-simply 1.10.2. This will allow us to rename the artifact without modifying
-the artifact checksums when we actually release.
-
-
-Changes since 1.10.2rc2:
-*Bugs*:
-[AIRFLOW-3732] Fix issue when trying to edit connection in RBAC UI
-[AIRFLOW-2866] Fix missing CSRF token head when using RBAC UI (#3804)
-...
-
-
-*Improvements*:
-[AIRFLOW-3302] Small CSS fixes (#4140)
-[Airflow-2766] Respect shared datetime across tabs
-...
-
-
-*New features*:
-[AIRFLOW-2874] Enables FAB's theme support (#3719)
-[AIRFLOW-3336] Add new TriggerRule for 0 upstream failures (#4182)
-...
-
-
-*Doc-only Change*:
-[AIRFLOW-XXX] Fix BashOperator Docstring (#4052)
-[AIRFLOW-3018] Fix Minor issues in Documentation
-...
-
-Cheers,
-<your name>
-```
-
-### Verify the release candidate by PMCs (legal)
-
-#### PMC responsibilities
-
-The PMCs should verify the releases in order to make sure the release is following the
-[Apache Legal Release Policy](http://www.apache.org/legal/release-policy.html).
-
-At least 3 (+1) votes should be recorded in accordance to
-[Votes on Package Releases](https://www.apache.org/foundation/voting.html#ReleaseVotes)
-
-The legal checks include:
-
-* checking if the packages are present in the right dist folder on svn
-* verifying if all the sources have correct licences
-* verifying if release manager signed the releases with the right key
-* verifying if all the checksums are valid for the release
-
-#### SVN check
-
-The files should be present in the sub-folder of
-[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/)
-
-The following files should be present (9 files):
-
-* -bin.tar.gz + .asc + .sha512
-* -source.tar.gz + .asc + .sha512
-* -.whl + .asc + .sha512
-
-As a PMC you should be able to clone the SVN repository:
-
-```shell script
-svn co https://dist.apache.org/repos/dist/dev/airflow
-```
-
-Or update it if you already checked it out:
-
-```shell script
-svn update .
-```
-
-#### Verify the licences
-
-This can be done with the Apache RAT tool.
-
-* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
-  the jar is inside)
-* Unpack the -source.tar.gz to a folder
-* Enter the folder and run the check (point to the place where you extracted the .jar)
-
-```shell script
-java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
-```
-
-#### Verify the signatures
-
-Make sure you have the signer's key imported in your GPG keyring. You can find the valid keys in
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
-
-You can import the whole KEYS file:
-
-```shell script
-gpg --import KEYS
-```
-
-You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
-retrieves it from the default GPG keyserver
-[OpenPGP.org](https://keys.openpgp.org):
-
-```shell script
-gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-You should choose to import the key when asked.
-
-Note that, being the default, the OpenPGP keyserver often tends to be overloaded and might respond with
-errors or timeouts. Many of the release managers have also uploaded their keys to the
-[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve keys from there.
-
-```shell script
-gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-Once you have the keys, the signatures can be verified by running this:
-
-```shell script
-for i in *.asc
-do
-   echo "Checking $i"; gpg --verify "$i"
-done
-```
-
-This should produce results similar to the below. The "Good signature from ..." line is an indication
-that the signatures are correct. Do not worry about the "not certified with a trusted signature"
-warning. Most of the certificates used by release managers are self-signed, which is why you get this
-warning. Having imported the key in the previous step via its ID from the
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you know that
-this is already a valid key.
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
-gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
-gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-```
-
-#### Verify the SHA512 sum
-
-Run this:
-
-```shell script
-for i in *.sha512
-do
-    echo "Checking $i"; gpg --print-md SHA512 `basename $i .sha512 ` | diff - $i
-done
-```
-
-You should get output similar to:
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
-Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
-```
-
-### Verify if the release candidate "works" by Contributors
-
-This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
-actual users of Apache Airflow test it in their own staging/test installations. Each release candidate
-is available on PyPI apart from the SVN packages, so everyone should be able to install
-the release candidate version of Airflow simply via pip (<VERSION> is 1.10.12 for example, and <X> is the
-release candidate number 1, 2, 3, ...).
-
-```shell script
-pip install apache-airflow==<VERSION>rc<X>
-```
-Optionally it can be followed by constraints:
-
-```shell script
-pip install apache-airflow==<VERSION>rc<X> \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.6.txt"
-```
-
-Note that the constraints URL contains the Python version that you are installing with.
-
-You can use any of the installation methods you prefer (you can even install it via the binary wheel
-downloaded from the SVN).
-
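-For example, to install the binary wheel downloaded from SVN (a sketch; the
-1.10.12rc4 file name matches the verification examples above):
-
-```shell script
-svn co https://dist.apache.org/repos/dist/dev/airflow/1.10.12rc4/ airflow-rc
-pip install airflow-rc/apache_airflow-1.10.12rc4-py2.py3-none-any.whl
-```
-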
-There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
-Running the following command will use tmux inside breeze, create an `admin` user and run the Webserver & Scheduler:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres
-```
-
-For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres --no-rbac-ui
-```
-
-Once you install and run Airflow, you should perform any verification you see as necessary to check
-that Airflow works as you expected.
-
-## Publish the final Apache Airflow release
-
-### Summarize the voting for the Apache Airflow release
-
-Once the vote has been passed, you will need to send a result vote to dev@airflow.apache.org:
-
-Subject:
-```
-[RESULT][VOTE] Airflow 1.10.2rc3
-```
-
-Message:
-
-```
-Hello,
-
-Apache Airflow 1.10.2 (based on RC3) has been accepted.
-
-4 “+1” binding votes received:
-- Kaxil Naik  (binding)
-- Bolke de Bruin (binding)
-- Ash Berlin-Taylor (binding)
-- Tao Feng (binding)
-
-
-4 "+1" non-binding votes received:
-
-- Deng Xiaodong (non-binding)
-- Stefan Seelmann (non-binding)
-- Joshua Patchus (non-binding)
-- Felix Uellendall (non-binding)
-
-Vote thread:
-https://lists.apache.org/thread.html/736404ca3d2b2143b296d0910630b9bd0f8b56a0c54e3a05f4c8b5fe@%3Cdev.airflow.apache.org%3E
-
-I'll continue with the release process, and the release announcement will follow shortly.
-
-Cheers,
-<your name>
-```
-
-
-### Publish release to SVN
-
-You need to migrate the RC artifacts that passed to this repository:
-https://dist.apache.org/repos/dist/release/airflow/
-(The migration should include renaming the files so that they no longer have the RC number in their filenames.)
-
-The best way of doing this is to svn cp  between the two repos (this avoids having to upload the binaries again, and gives a clearer history in the svn commit logs):
-
-```shell script
-# First clone the repo
-export RC=1.10.4rc5
-export VERSION=${RC/rc?/}
-svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
-
-# Create new folder for the release
-cd airflow-release
-svn mkdir ${VERSION}
-cd ${VERSION}
-
-# Move the artifacts to svn folder & commit
-for f in ../../airflow-dev/$RC/*; do base=$(basename "$f"); svn cp "$f" "${base/rc?/}"; done
-svn commit -m "Release Airflow ${VERSION} from ${RC}"
-
-# Remove old release
-# http://www.apache.org/legal/release-policy.html#when-to-archive
-cd ..
-export PREVIOUS_VERSION=1.10.1
-svn rm ${PREVIOUS_VERSION}
-svn commit -m "Remove old release: ${PREVIOUS_VERSION}"
-```
-
-Verify that the packages appear in [airflow](https://dist.apache.org/repos/dist/release/airflow/)
-
-### Prepare PyPI "release" packages
-
-At this point we release an official package:
-
-- Build the package:
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Verify the artifacts that would be uploaded:
-
-    ```shell script
-    twine check dist/*
-    ```
-
-- Upload the package to PyPi's test environment:
-
-    ```shell script
-    twine upload -r pypitest dist/*
-    ```
-
-- Verify that the test package looks good by downloading it and installing it into a virtual environment.
-    The package download link is available at: https://test.pypi.org/project/apache-airflow/#files
-
-- Upload the package to PyPi's production environment:
-
-    ```shell script
-    twine upload -r pypi dist/*
-    ```
-
-- Again, confirm that the package is available here: https://pypi.python.org/pypi/apache-airflow
-
-### Update CHANGELOG.md
-
-- Get a diff between the last version and the current version:
-
-    ```shell script
-    $ git log 1.8.0..1.9.0 --pretty=oneline
-    ```
-- Update CHANGELOG.md with the details, and commit it.
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow ${VERSION} is released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that Airflow ${VERSION} was just released.
-
-The source release, as well as the binary "sdist" release, are available
-here:
-
-https://dist.apache.org/repos/dist/release/airflow/${VERSION}/
-
-We also made this version available on PyPi for convenience (`pip install apache-airflow`):
-
-https://pypi.python.org/pypi/apache-airflow
-
-The documentation is available on:
-https://airflow.apache.org/
-https://airflow.apache.org/1.10.2/
-https://airflow.readthedocs.io/en/1.10.2/
-https://airflow.readthedocs.io/en/stable/
-
-Find the CHANGELOG here for more details:
-
-https://airflow.apache.org/changelog.html#airflow-1-10-2-2019-01-19
-
-Cheers,
-<your name>
-EOF
-```
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
-
-
------------------------------------------------------------------------------------------------------------
-
-
-# Provider Packages
-
-You can read more about the command line tools used to generate the packages and the two types of
-packages we have (Backport and Regular Provider Packages) in [Provider packages](PROVIDER_PACKAGES.md).
-
-## Decide when to release
-
-You can release provider packages separately from the main Airflow on an ad-hoc basis, whenever we find that
-a given provider needs to be released - due to new features or due to bug fixes.
-You can release each provider package separately, but due to voting and release overhead we try to group
-releases of provider packages together.
-
-### Backport provider packages versioning
-
-We are using the [CALVER](https://calver.org/) versioning scheme for the backport packages. We also have an
-automated way to prepare and build the packages, so it should be very easy to release the packages often and
-separately. Backport packages will be maintained for three months after 2.0.0 version of Airflow, and it is
-really a bridge, allowing people to migrate to Airflow 2.0 in stages, so the overhead of maintaining
-semver versioning does not apply there - subsequent releases might be backward-incompatible, and it is
-not indicated by the version of the packages.
-
-### Regular provider packages versioning
-
-We are using the [SEMVER](https://semver.org/) versioning scheme for the regular packages. This is in order
-to give the users confidence about maintaining backwards compatibility in the new releases of those
-packages.
-
-Details about maintaining the SEMVER version are going to be discussed and implemented in
-[the related issue](https://github.com/apache/airflow/issues/11425)
-
-## Prepare the Backport Provider Packages RC
-
-### Generate release notes
-
-Prepare release notes for all the packages you plan to release. Where YYYY.MM.DD is the CALVER
-date for the packages.
-
-```shell script
-./breeze --backports prepare-provider-readme YYYY.MM.DD [packages]
-```
-
-If you iterate with merges and release candidates you can update the release date without providing
-the date (to update the existing release notes)
-
-```shell script
-./breeze --backports prepare-provider-readme google
-```
-
-Generated readme files should eventually be committed to the repository.
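-
-A sketch of that commit step (the readme paths are an assumption about the
-repository layout):
-
-```shell script
-git add airflow/providers/*/README.md   # hypothetical location of the generated readmes
-git commit -m "Update backport provider release notes"
-```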
-
-### Build an RC release for SVN apache upload
-
-The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any
-modification than renaming i.e. the contents of the files must be the same between voted
-release candidate and final release. Because of this the version in the built artifacts
-that will become the official Apache releases must not include the rcN suffix. They also need
-to be signed and have checksum files. You can generate the checksum/signature files by running
-the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script
-generates corresponding .asc and .sha512 files for each file to sign.
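-
-For a single artifact, the invocation and its outputs look like this (a sketch):
-
-```shell script
-${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-backport-providers-2020.5.20rc2-source.tar.gz
-# produces, next to the file:
-#   apache-airflow-backport-providers-2020.5.20rc2-source.tar.gz.asc
-#   apache-airflow-backport-providers-2020.5.20rc2-source.tar.gz.sha512
-```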
-
-#### Build and sign the source and convenience packages
-
-* Set environment variables (version and root of airflow repo)
-
-```shell script
-export VERSION=2020.5.20rc2
-export AIRFLOW_REPO_ROOT=$(pwd)
-
-```
-
-* Build the source package:
-
-```shell script
-./provider_packages/build_source_package.sh --backports
-```
-
-It will generate `apache-airflow-backport-providers-${VERSION}-source.tar.gz`
-
-* Generate the packages - since we are preparing packages for the SVN repo, we should use the right switch. Note
-  that this will clean up the dist folder before generating the packages, so it will only contain the packages
-  you intended to build.
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-svn rc1
-```
-
-if you only build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
-```
-
-* Move the source tarball to dist folder
-
-```shell script
-mv apache-airflow-backport-providers-${VERSION}-source.tar.gz dist
-```
-
-* Sign all your packages
-
-```shell script
-pushd dist
-../dev/sign.sh *
-popd
-```
-
-* Push tags to the Apache repository (assuming that you have the apache remote pointing to the apache/airflow repo)
-
-```shell script
-git push apache backport-providers-${VERSION}
-```
-
-#### Commit the source packages to Apache SVN repo
-
-* Push the artifacts to ASF dev dist repo
-
-```shell script
-# First clone the repo if you do not have it
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# update the repo in case you have it already
-cd airflow-dev
-svn update
-
-# Create a new folder for the release.
-cd airflow-dev/backport-providers
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
-
-# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow ${VERSION}"
-
-cd ${AIRFLOW_REPO_ROOT}
-```
-
-Verify that the files are available at
-[backport-providers](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
-
-### Publish the RC convenience package to PyPI
-
-In order to publish to PyPI you just need to build and release the packages. The packages should, however,
-contain the rcN suffix in the version name as well, so you need to use the `--version-suffix-for-pypi` switch
-to prepare those packages. Note that these are different packages than the ones used for the SVN upload,
-though they should be generated from the same sources.
-
-* Generate the packages with the right RC version (specify the version suffix with PyPI switch). Note that
-this will clean up dist folder before generating the packages, so you will only have the right packages there.
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-pypi rc1
-```
-
-if you only build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-pypi rc1 PACKAGE PACKAGE ....
-```
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPi's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output - separately for each package.
-
-* Upload the package to PyPi's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-* Copy the list of links to the uploaded packages - they will be useful in preparing VOTE email.
-
-* Again, confirm that the packages are available under the links printed.
-
-## Vote and verify the Backport Providers release candidate
-
-### Prepare voting email for Backport Providers release candidate
-
-Make sure the packages are in https://dist.apache.org/repos/dist/dev/airflow/backport-providers/
-
-Send out a vote to the dev@airflow.apache.org mailing list. Here you can prepare text of the
-email using the ${VERSION} variable you already set in the command line.
-
-subject:
-
-
-```shell script
-cat <<EOF
-[VOTE] Airflow Backport Providers ${VERSION}
-EOF
-```
-
-```shell script
-cat <<EOF
-Hey all,
-
-I have cut Airflow Backport Providers ${VERSION}. This email is calling a vote on the release,
-which will last for 72 hours - which means that it will end on $(date -d '+3 days').
-
-Consider this my (binding) +1.
-
-Airflow Backport Providers ${VERSION} are available at:
-https://dist.apache.org/repos/dist/dev/airflow/backport-providers/${VERSION}/
-
-*apache-airflow-backport-providers-${VERSION}-source.tar.gz* is a source release that comes
- with INSTALL instructions.
-
-*apache-airflow-backport-providers-<PROVIDER>-${VERSION}-bin.tar.gz* are the binary
- Python "sdist" releases.
-
-The test procedure for PMCs and Contributors who would like to test the RC candidates is described in
-https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-backport-providers-release-candidate
-
-
-Public keys are available at:
-https://dist.apache.org/repos/dist/release/airflow/KEYS
-
-Please vote accordingly:
-
-[ ] +1 approve
-[ ] +0 no opinion
-[ ] -1 disapprove with the reason
-
-
-Only votes from PMC members are binding, but members of the community are
-encouraged to test the release and vote with "(non-binding)".
-
-Please note that the version number excludes the 'rcX' string, so it's now
-simply ${VERSION%rc?}. This will allow us to rename the artifact without modifying
-the artifact checksums when we actually release.
-
-Each of the packages contains detailed changelog. Here is the list of links to
-the released packages and changelogs:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-Cheers,
-<TODO: Your Name>
-
-EOF
-```
-
-Due to the nature of backport packages, not all packages have to be released as convenience
-packages in the final release. During the voting process
-the voting PMCs might decide to exclude certain packages from the release if some critical
-problems have been found in some packages.
-
-Please modify the message above accordingly to clearly exclude those packages.
-
-### Verify the release
-
-#### SVN check
-
-The files should be present in the sub-folder of
-[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
-
-The following files should be present:
-
-* -source.tar.gz + .asc + .sha512 (one set of files)
-* -bin.tar.gz + .asc + .sha512 (one set of files per provider)
-* -.whl + .asc + .sha512 (one set of files per provider)
-
-As a PMC you should be able to clone the SVN repository:
-
-```shell script
-svn co https://dist.apache.org/repos/dist/dev/airflow/
-```
-
-Or update it if you already checked it out:
-
-```shell script
-svn update .
-```
-
-#### Verify the licences
-
-This can be done with the Apache RAT tool.
-
-* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
-  the jar is inside)
-* Unpack the -source.tar.gz to a folder
-* Enter the folder and run the check (point to the place where you extracted the .jar)
-
-```shell script
-java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
-```
-
-#### Verify the signatures
-
-Make sure you have the signer's key imported in your GPG keyring. You can find the valid keys in
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
-
-You can import the whole KEYS file:
-
-```shell script
-gpg --import KEYS
-```
-
-You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
-retrieves it from the default GPG keyserver
-[OpenPGP.org](https://keys.openpgp.org):
-
-```shell script
-gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-You should choose to import the key when asked.
-
-Note that, being the default, the OpenPGP keyserver often tends to be overloaded and might respond with
-errors or timeouts. Many of the release managers have also uploaded their keys to the
-[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve keys from there.
-
-```shell script
-gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-Once you have the keys, the signatures can be verified by running this:
-
-```shell script
-for i in *.asc
-do
-   echo "Checking $i"; gpg --verify "$i"
-done
-```
-
-This should produce results similar to the below. The "Good signature from ..." line is an indication
-that the signatures are correct. Do not worry about the "not certified with a trusted signature"
-warning. Most of the certificates used by release managers are self-signed, which is why you get this
-warning. Having imported the key in the previous step via its ID from the
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you know that
-this is already a valid key.
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
-gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
-gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-```
-
-#### Verify the SHA512 sum
-
-Run this:
-
-```shell script
-for i in *.sha512
-do
-    echo "Checking $i"; gpg --print-md SHA512 `basename $i .sha512 ` | diff - $i
-done
-```
-
-You should get output similar to:
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
-Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
-```
-
-### Verify if the Backport Packages release candidates "work" by Contributors
-
-This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
-actual users of Apache Airflow test them in their own staging/test installations. Each release candidate
-is available on PyPI apart from the SVN packages, so everyone should be able to install
-the release candidate versions simply via pip (<VERSION> is 1.10.12 for example, and <X> is the
-release candidate number 1, 2, 3, ...).
-
-You can use any of the installation methods you prefer (you can even install it via the binary wheels
-downloaded from the SVN).
-
-
-#### Installing in your local virtualenv
-
-You have to make sure you have Airflow 1.10.* installed in your pip virtualenv
-(the version you want to install the providers with).
-
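-One way to get that Airflow version in place first (a sketch reusing the
-constraints pattern shown earlier in this guide):
-
-```shell script
-pip install apache-airflow==1.10.12 \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.6.txt"
-```
-
-Then install the backport provider package you want to test:
-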
-```shell script
-pip install apache-airflow-backport-providers-<provider>==<VERSION>rc<X>
-```
-
-#### Installing with Breeze
-
-There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
-Here is a typical scenario.
-
-First copy all the provider packages .whl files to the `dist` folder.
-
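-A sketch of that copy step (the source location of the wheels is an assumption;
-use wherever you built or downloaded them):
-
-```shell script
-mkdir -p dist
-cp /path/to/backport-wheels/*.whl dist/   # hypothetical source directory
-```
-
-Then start Breeze with the wheels installed:
-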
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> \
-    --python 3.7 --backend postgres --install-wheels
-```
-
-For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> \
-    --python 3.7 --backend postgres --install-wheels --no-rbac-ui
-```
-
-#### Building your own docker image
-
-If you prefer to build your own image, you can also use the official image and PyPI packages to test
-the backport packages. This is especially helpful when you want to test integrations but need to install
-additional tools. Below is an example Dockerfile, which installs backport providers for Google and
-additional third-party tools:
-
-```dockerfile
-FROM apache/airflow:1.10.12
-
-RUN pip install --user apache-airflow-backport-providers-google==2020.10.5.rc1
-
-RUN curl https://sdk.cloud.google.com | bash \
-    && echo "source /home/airflow/google-cloud-sdk/path.bash.inc" >> /home/airflow/.bashrc \
-    && echo "source /home/airflow/google-cloud-sdk/completion.bash.inc" >> /home/airflow/.bashrc
-
-USER 0
-RUN KUBECTL_VERSION="$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)" \
-    && KUBECTL_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl" \
-    && curl -L "${KUBECTL_URL}" --output /usr/local/bin/kubectl \
-    && chmod +x /usr/local/bin/kubectl
-
-USER ${AIRFLOW_UID}
-```
-
-To build the image and run a shell in it, run:
-
-```shell script
-docker build . -t my-airflow
-docker run  -ti \
-    --rm \
-    -v "$PWD/data:/opt/airflow/" \
-    -v "$PWD/keys/:/keys/" \
-    -p 8080:8080 \
-    -e GOOGLE_APPLICATION_CREDENTIALS=/keys/sa.json \
-    -e AIRFLOW__CORE__LOAD_EXAMPLES=True \
-    my-airflow bash
-```
-
-#### Verification
-
-Once you install and run Airflow, you can perform any verification you see as necessary to check
-that Airflow works as you expected.
-
-## Publish the final releases of backport packages
-
-### Summarize the voting for the Backport Providers Release
-
-Once the vote has been passed, you will need to send a result vote to dev@airflow.apache.org:
-
-Subject:
-```shell script
-cat <<EOF
-[RESULT][VOTE] Airflow Backport Providers ${VERSION}
-EOF
-```
-
-Body:
-
-```shell script
-cat <<EOF
-
-Hey all,
-
-Airflow Backport Providers ${VERSION} (based on the ${VERSION_RC} candidate) has been accepted.
-
-N "+1" binding votes received:
-- PMC Member  (binding)
-...
-
-N "+1" non-binding votes received:
-
-- COMMITTER (non-binding)
-
-Vote thread:
-https://lists.apache.org/thread.html/<TODO:REPLACE_ME_WITH_THE_VOTING_THREAD>@%3Cdev.airflow.apache.org%3E
-
-I'll continue with the release process and the release announcement will follow shortly.
-
-Cheers,
-<TODO: Your Name>
-
-EOF
-
-```
-
-### Publish release to SVN
-
-The best way of doing this is to svn cp between the two repos (this avoids having to upload the binaries
-again, and gives a clearer history in the svn commit logs).
-
-We also need to archive older releases before copying the new ones; see the
-[Release policy](http://www.apache.org/legal/release-policy.html#when-to-archive).
-
-```shell script
-# Set the variables
-export VERSION_RC=2020.5.20rc2
-export VERSION=${VERSION_RC/rc?/}
-
-# Set AIRFLOW_REPO_ROOT to the path of your git repo
-export AIRFLOW_REPO_ROOT=$(pwd)
-
-# Go to the directory where you have checked out the dev svn release
-# And go to the sub-folder with RC candidates
-cd "<ROOT_OF_YOUR_DEV_REPO>/backport-providers/${VERSION_RC}"
-export SOURCE_DIR=$(pwd)
-
-# Go the folder where you have checked out the release repo
-# Clone it if it's not done yet
-svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
-
-# Update to latest version
-svn update
-
-# Create backport-providers folder if it does not exist
-# All latest releases are kept in this one folder without version sub-folder
-mkdir -pv backport-providers
-cd backport-providers
-
-# Move the artifacts to svn folder & remove the rc postfix
-for file in ${SOURCE_DIR}/*${VERSION_RC}*
-do
-  base_file=$(basename ${file})
-  svn cp "${file}" "${base_file/${VERSION_RC}/${VERSION}}"
-done
-
-
-# If some packages have been excluded, remove them now
-# Check the packages
-ls *<provider>*
-# Remove them
-svn rm *<provider>*
-
-# Check which old packages will be removed (you need python 3.6+)
-python ${AIRFLOW_REPO_ROOT}/provider_packages/remove_old_releases.py \
-    --directory .
-
-# Remove those packages
-python ${AIRFLOW_REPO_ROOT}/provider_packages/remove_old_releases.py \
-    --directory . --execute
-
-
-# Commit to SVN
-svn commit -m "Release Airflow Backport Providers ${VERSION} from ${VERSION_RC}"
-```
-
-Verify that the packages appear in
-[backport-providers](https://dist.apache.org/repos/dist/release/airflow/backport-providers)
-
-### Publish the final version convenience package to PyPI
-
-Checkout the RC Version:
-
-```shell script
-git checkout backport-providers-${VERSION_RC}
-```
-
-Tag and push the final version (providing that your apache remote is named 'apache'):
-
-```shell script
-git tag backport-providers-${VERSION}
-git push apache backport-providers-${VERSION}
-```
-
-In order to publish to PyPI you just need to build and release packages.
-
-* Generate the packages.
-
-```shell script
-./breeze --backports prepare-provider-packages
-```
-
-If you only build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages <PACKAGE> ...
-```
-
-In case you decided to remove some of the packages, remove them from the dist folder now:
-
-```shell script
-ls dist/*<provider>*
-rm dist/*<provider>*
-```
-
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPI's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output - separately for each package.
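-
-For example (a minimal sketch; the google package name is just an illustration, and the
-test.pypi.org index options are an assumption about how you prefer to install from the
-test repository):
-
-```shell script
-python3 -m venv /tmp/backport-providers-test
-source /tmp/backport-providers-test/bin/activate
-pip install -i https://test.pypi.org/simple/ \
-    --extra-index-url https://pypi.org/simple/ \
-    "apache-airflow-backport-providers-google==${VERSION}"
-deactivate
-```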
-
-* Upload the package to PyPI's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow Backport Providers ${VERSION} are released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that Airflow Backport Providers packages ${VERSION} were just released.
-
-The source release, as well as the binary releases, are available here:
-
-https://dist.apache.org/repos/dist/release/airflow/backport-providers/
-
-We also made those versions available on PyPI for convenience ('pip install apache-airflow-backport-providers-*'):
-
-https://pypi.org/search/?q=apache-airflow-backport-providers
-
-The documentation and changelogs are available in the PyPI packages:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-
-Cheers,
-<your name>
-EOF
-```
-
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
-
------------------------------------------------------------------------------------------------------------
-
-## Prepare the Regular Provider Packages Alpha
-
-### Generate release notes
-
-Prepare release notes for all the packages you plan to release. Note that for now the version number is
-hard-coded to 0.0.1 for all packages. Later on we are going to update the versions according
-to SemVer versioning.
-
-Details about maintaining the SemVer versioning are going to be discussed and implemented in
-[the related issue](https://github.com/apache/airflow/issues/11425)
-
-
-```shell script
-./breeze prepare-provider-readme [packages]
-```
-
-You can iterate and re-generate the same readme content as many times as you want.
-Generated readme files should eventually be committed to the repository.
-
-### Build regular provider packages for SVN apache upload
-
-There is a slightly different procedure if you build pre-release (alpha/beta) packages versus
-release candidates. For the Alpha artifacts there is no voting and no signature/checksum check, so
-we do not need to care about this part. For release candidates - those packages might get promoted
-to "final" packages by just renaming the files, so internally they should keep the final version
-number without the rc suffix, even if they are rc1/rc2/... candidates.
-
-They also need to be signed and have checksum files. You can generate the checksum/signature files by running
-the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script
-generates corresponding .asc and .sha512 files for each file to sign.
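-
-For example, signing a single (hypothetical) sdist leaves two extra files next to it:
-
-```shell script
-../dev/sign.sh apache-airflow-backport-providers-google-2020.10.5.tar.gz
-# produces:
-#   apache-airflow-backport-providers-google-2020.10.5.tar.gz.asc
-#   apache-airflow-backport-providers-google-2020.10.5.tar.gz.sha512
-```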
-
-#### Build and sign the source and convenience packages
-
-Currently, we are releasing alpha provider packages together with the main sources of Airflow. In the future
-we are going to add a procedure to release the sources of released provider packages separately.
-Details are in [the related issue](https://github.com/apache/airflow/issues/11425)
-
-For alpha/beta releases you need to specify both suffixes - svn and pypi - and they have to match. This is
-verified by the breeze script. Note that the script will clean up the dist folder before generating the
-packages, so it will only contain the packages you intended to build.
-
-* Pre-release packages:
-
-```shell script
-export VERSION=0.0.1alpha1
-
-./breeze prepare-provider-packages --version-suffix-for-svn a1 --version-suffix-for-pypi a1
-```
-
-If you only build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-svn a1 --version-suffix-for-pypi a1 \
-    PACKAGE PACKAGE ....
-```
-
-* Release candidate packages:
-
-```shell script
-export VERSION=0.0.1alpha1
-
-./breeze prepare-provider-packages --version-suffix-for-svn rc1
-```
-
-If you only build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
-```
-
-* Sign all your packages
-
-```shell script
-pushd dist
-../dev/sign.sh *
-popd
-```
-
-#### Commit the source packages to Apache SVN repo
-
-* Push the artifacts to ASF dev dist repo
-
-```shell script
-# First clone the repo if you do not have it
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# update the repo in case you have it already
-cd airflow-dev
-svn update
-
-# Create a new folder for the release (you are already in airflow-dev)
-cd providers
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
-
-# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow Providers ${VERSION}"
-
-cd ${AIRFLOW_REPO_ROOT}
-```
-
-Verify that the files are available at
-[providers](https://dist.apache.org/repos/dist/dev/airflow/providers/)
-
-### Publish the Regular convenience package to PyPI
-
-
-In case of pre-release versions you build the same packages for both PyPI and SVN so you can simply use
-packages generated in the previous step and you can skip the "prepare" step below.
-
-In order to publish a release candidate to PyPI you just need to build and release the packages.
-The packages should however contain the rcN suffix in the version file name but not internally in the package,
-so you need to use the `--version-suffix-for-pypi` switch to prepare those packages.
-Note that these are different packages than the ones used for SVN upload
-though they should be generated from the same sources.
-
-* Generate the packages with the right RC version (specify the version suffix with the PyPI switch). Note that
-this will clean up the dist folder before generating the packages, so you will only have the right packages there.
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-pypi a1 --version-suffix-for-svn a1
-```
-
-If you only build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-pypi a1 \
-    PACKAGE PACKAGE ....
-```
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPI's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output - separately for each package.
-
-* Upload the package to PyPI's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-* Again, confirm that the packages are available under the links printed.
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow Providers are released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that a new version of Airflow Providers packages was just released.
-
-The source release, as well as the binary releases, are available here:
-
-https://dist.apache.org/repos/dist/release/airflow/providers/
-
-We also made those versions available on PyPI for convenience ('pip install apache-airflow-providers-*'):
-
-https://pypi.org/search/?q=apache-airflow-providers
-
-The documentation and changelogs are available in the PyPI packages:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-Cheers,
-<your name>
-EOF
-```
-
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
new file mode 100644
index 0000000..2fb1294
--- /dev/null
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -0,0 +1,733 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
+**Table of contents**
+
+- [Prepare the Apache Airflow Package RC](#prepare-the-apache-airflow-package-rc)
+  - [Build RC artifacts](#build-rc-artifacts)
+  - [Prepare PyPI convenience "snapshot" packages](#prepare-pypi-convenience-snapshot-packages)
+  - [\[Optional\] - Manually prepare production Docker Image](#%5Coptional%5C---manually-prepare-production-docker-image)
+  - [Prepare Vote email on the Apache Airflow release candidate](#prepare-vote-email-on-the-apache-airflow-release-candidate)
+- [Verify the release candidate by PMCs](#verify-the-release-candidate-by-pmcs)
+  - [SVN check](#svn-check)
+  - [Licence check](#licence-check)
+  - [Signature check](#signature-check)
+  - [SHA512 sum check](#sha512-sum-check)
+- [Verify release candidates by Contributors](#verify-release-candidates-by-contributors)
+- [Publish the final Apache Airflow release](#publish-the-final-apache-airflow-release)
+  - [Summarize the voting for the Apache Airflow release](#summarize-the-voting-for-the-apache-airflow-release)
+  - [Publish release to SVN](#publish-release-to-svn)
+  - [Prepare PyPI "release" packages](#prepare-pypi-release-packages)
+  - [Update CHANGELOG.md](#update-changelogmd)
+  - [\[Optional\] - Manually prepare production Docker Image](#%5Coptional%5C---manually-prepare-production-docker-image-1)
+  - [Publish documentation](#publish-documentation)
+  - [Notify developers of release](#notify-developers-of-release)
+  - [Update Announcements page](#update-announcements-page)
+
+<!-- END doctoc generated TOC please keep comment here to allow auto update -->
+
+You can find the prerequisites to release Apache Airflow in [README.md](README.md).
+
+# Prepare the Apache Airflow Package RC
+
+## Build RC artifacts
+
+The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any modification other than renaming – i.e. the contents of the files must be the same between the voted release candidate and the final release. Because of this, the version in the built artifacts that will become the official Apache releases must not include the rcN suffix.
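+
+For example (an illustration only): the file voted upon as `apache-airflow-1.10.2rc3-bin.tar.gz`
+internally carries version `1.10.2`, so promoting it to the final release is just a rename:
+
+```shell script
+mv apache-airflow-1.10.2rc3-bin.tar.gz apache-airflow-1.10.2-bin.tar.gz
+```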
+
+- Set environment variables
+
+    ```shell script
+    # Set Version
+    export VERSION=1.10.2rc3
+
+
+    # Set AIRFLOW_REPO_ROOT to the path of your git repo
+    export AIRFLOW_REPO_ROOT=$(pwd)
+
+
+    # Example after cloning
+    git clone https://github.com/apache/airflow.git airflow
+    cd airflow
+    export AIRFLOW_REPO_ROOT=$(pwd)
+    ```
+
+- Set your version to 1.10.2 in `setup.py` (without the RC tag)
+- Commit the version change.
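+
+    For instance (a sketch only - the exact shape of the `version` line in `setup.py` may differ,
+    so review the change with `git diff` before committing):
+
+    ```shell script
+    sed -i 's/^version = .*/version = "1.10.2"/' setup.py
+    git diff setup.py
+    git commit -am "Bump version to 1.10.2"
+    ```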
+
+- Tag your release
+
+    ```shell script
+    git tag -s ${VERSION}
+    ```
+
+- Clean the checkout: the sdist step below will otherwise include any untracked or changed files
+
+    ```shell script
+    git clean -fxd
+    ```
+
+- Tarball the repo
+
+    ```shell script
+    git archive --format=tar.gz ${VERSION} --prefix=apache-airflow-${VERSION}/ -o apache-airflow-${VERSION}-source.tar.gz
+    ```
+
+
+- Generate sdist
+
+    NOTE: Make sure your checkout is clean at this stage - any untracked or changed files will otherwise be included
+     in the file produced.
+
+    ```shell script
+    python setup.py compile_assets sdist bdist_wheel
+    ```
+
+- Rename the sdist
+
+    ```shell script
+    mv dist/apache-airflow-${VERSION%rc?}.tar.gz apache-airflow-${VERSION}-bin.tar.gz
+    mv dist/apache_airflow-${VERSION%rc?}-py2.py3-none-any.whl apache_airflow-${VERSION}-py2.py3-none-any.whl
+    ```
+
+- Generate SHA512/ASC (If you have not generated a key yet, generate it by following instructions on http://www.apache.org/dev/openpgp.html#key-gen-generate-key)
+
+    ```shell script
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-source.tar.gz
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-bin.tar.gz
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache_airflow-${VERSION}-py2.py3-none-any.whl
+    ```
+
+- Tag & Push latest constraints files. This pushes constraints with rc suffix (this is expected)!
+
+    ```shell script
+    git checkout constraints-1-10
+    git tag -s "constraints-${VERSION}"
+    git push origin "constraints-${VERSION}"
+    ```
+
+- Push the artifacts to ASF dev dist repo
+
+```shell script
+# First clone the repo
+svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
+
+# Create new folder for the release
+cd airflow-dev
+svn mkdir ${VERSION}
+
+# Move the artifacts to svn folder & commit
+mv ${AIRFLOW_REPO_ROOT}/apache{-,_}airflow-${VERSION}* ${VERSION}/
+cd ${VERSION}
+svn add *
+svn commit -m "Add artifacts for Airflow ${VERSION}"
+```
+
+## Prepare PyPI convenience "snapshot" packages
+
+At this point we have the artifact that we vote on, but as a convenience to developers we also want to
+publish "snapshots" of the RC builds to PyPI for installing via pip. Also those packages
+are used to build the production docker image in DockerHub, so we need to upload the packages
+before we push the tag to GitHub. Pushing the tag to GitHub automatically triggers image building in
+DockerHub.
+
+To do this we need to
+
+- Build the package:
+
+    ```shell script
+    python setup.py compile_assets egg_info --tag-build "$(sed -e "s/^[0-9.]*//" <<<"$VERSION")" sdist bdist_wheel
+    ```
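+
+    For example, with `VERSION=1.10.2rc3` the `sed` expression strips the leading digits and dots
+    and passes `rc3` as the build tag:
+
+    ```shell script
+    sed -e "s/^[0-9.]*//" <<<"1.10.2rc3"   # prints: rc3
+    ```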
+
+- Verify the artifacts that would be uploaded:
+
+    ```shell script
+    twine check dist/*
+    ```
+
+- Upload the package to PyPI's test environment:
+
+    ```shell script
+    twine upload -r pypitest dist/*
+    ```
+
+- Verify that the test package looks good by downloading it and installing it into a virtual environment. The package download link is available at:
+https://test.pypi.org/project/apache-airflow/#files
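+
+    A minimal sketch of such a check (the test.pypi.org index options are an assumption about
+    how you prefer to install from the test repository):
+
+    ```shell script
+    python3 -m venv /tmp/airflow-rc-test
+    source /tmp/airflow-rc-test/bin/activate
+    pip install -i https://test.pypi.org/simple/ \
+        --extra-index-url https://pypi.org/simple/ \
+        "apache-airflow==${VERSION}"
+    airflow version
+    deactivate
+    ```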
+
+- Upload the package to PyPI's production environment:
+`twine upload -r pypi dist/*`
+
+- Again, confirm that the package is available here:
+https://pypi.python.org/pypi/apache-airflow
+
+It is important to stress that this snapshot should not be named "release", and it
+is not supposed to be used by or advertised to end-users who do not read the devlist.
+
+- Push Tag for the release candidate
+
+    This step should only be done now and not before, because it triggers an automated build of
+    the production docker image, using the packages that are currently released in PyPI
+    (both airflow and latest provider packages).
+
+    ```shell script
+    git push origin ${VERSION}
+    ```
+
+## \[Optional\] - Manually prepare production Docker Image
+
+Production Docker images should be automatically built in 2-3 hours after the release tag has been
+pushed. If this does not happen, please log in to DockerHub and check the status of the builds:
+[Build Timeline](https://hub.docker.com/repository/docker/apache/airflow/timeline)
+
+If you need to, you can also build and push the images manually:
+
+Airflow 2+:
+
+```shell script
+export DOCKER_REPO=docker.io/apache/airflow
+for python_version in "3.6" "3.7" "3.8"
+do
+  (
+    export DOCKER_TAG=${VERSION}-python${python_version}
+    ./scripts/ci/images/ci_build_dockerhub.sh
+  )
+done
+```
+
+This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean".
+
+Airflow 1.10:
+
+```shell script
+for python_version in "2.7" "3.5" "3.6" "3.7" "3.8"
+do
+    ./breeze build-image --production-image --python ${python_version} \
+        --image-tag apache/airflow:${VERSION}-python${python_version} --build-cache-local
+    docker push apache/airflow:${VERSION}-python${python_version}
+done
+docker tag apache/airflow:${VERSION}-python3.6 apache/airflow:${VERSION}
+docker push apache/airflow:${VERSION}
+```
+
+
+## Prepare Vote email on the Apache Airflow release candidate
+
+- Use the dev/airflow-jira script to generate a list of Airflow JIRAs that were closed in the release.
+
+- Send out a vote to the dev@airflow.apache.org mailing list:
+
+Subject:
+
+```
+[VOTE] Airflow 1.10.2rc3
+```
+
+Body:
+
+```
+Hey all,
+
+I have cut Airflow 1.10.2 RC3. This email is calling a vote on the release,
+which will last for 72 hours. Consider this my (binding) +1.
+
+Airflow 1.10.2 RC3 is available at:
+https://dist.apache.org/repos/dist/dev/airflow/1.10.2rc3/
+
+*apache-airflow-1.10.2rc3-source.tar.gz* is a source release that comes
+with INSTALL instructions.
+*apache-airflow-1.10.2rc3-bin.tar.gz* is the binary Python "sdist" release.
+
+Public keys are available at:
+https://dist.apache.org/repos/dist/release/airflow/KEYS
+
+Only votes from PMC members are binding, but the release manager should encourage members of the community
+to test the release and vote with "(non-binding)".
+
+The test procedure for PMCs and Contributors who would like to test this RC is described in
+https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate
+
+Please note that the version number excludes the `rcX` string, so it's now
+simply 1.10.2. This will allow us to rename the artifact without modifying
+the artifact checksums when we actually release.
+
+
+Changes since 1.10.2rc2:
+*Bugs*:
+[AIRFLOW-3732] Fix issue when trying to edit connection in RBAC UI
+[AIRFLOW-2866] Fix missing CSRF token head when using RBAC UI (#3804)
+...
+
+
+*Improvements*:
+[AIRFLOW-3302] Small CSS fixes (#4140)
+[Airflow-2766] Respect shared datetime across tabs
+...
+
+
+*New features*:
+[AIRFLOW-2874] Enables FAB's theme support (#3719)
+[AIRFLOW-3336] Add new TriggerRule for 0 upstream failures (#4182)
+...
+
+
+*Doc-only Change*:
+[AIRFLOW-XXX] Fix BashOperator Docstring (#4052)
+[AIRFLOW-3018] Fix Minor issues in Documentation
+...
+
+Cheers,
+<your name>
+```
+
+
+# Verify the release candidate by PMCs
+
+The PMCs should verify the releases in order to make sure the release is following the
+[Apache Legal Release Policy](http://www.apache.org/legal/release-policy.html).
+
+At least 3 (+1) votes should be recorded in accordance with
+[Votes on Package Releases](https://www.apache.org/foundation/voting.html#ReleaseVotes)
+
+The legal checks include:
+
+* checking if the packages are present in the right dist folder on svn
+* verifying if all the sources have correct licences
+* verifying if the release manager signed the releases with the right key
+* verifying if all the checksums are valid for the release
+
+## SVN check
+
+The files should be present in the sub-folder of
+[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/)
+
+The following files should be present (9 files):
+
+* -bin.tar.gz + .asc + .sha512
+* -source.tar.gz + .asc + .sha512
+* -.whl + .asc + .sha512
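+
+You can list the uploaded files, for example, with:
+
+```shell script
+svn ls https://dist.apache.org/repos/dist/dev/airflow/1.10.2rc3/
+```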
+
+As a PMC you should be able to clone the SVN repository:
+
+```shell script
+svn co https://dist.apache.org/repos/dist/dev/airflow
+```
+
+Or update it if you already checked it out:
+
+```shell script
+svn update .
+```
+
+## Licence check
+
+This can be done with the Apache RAT tool.
+
+* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
+  the jar is inside)
+* Unpack the -source.tar.gz to a folder
+* Enter the folder and run the check (point to the place where you extracted the .jar)
+
+```shell script
+java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
+```
+
+## Signature check
+
+Make sure you have imported into GPG the key of the person who signed the release. You can find the valid keys in
+[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
+
+You can import the whole KEYS file:
+
+```shell script
+gpg --import KEYS
+```
+
+You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
+retrieves it from the default GPG keyserver
+[OpenPGP.org](https://keys.openpgp.org):
+
+```shell script
+gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+```
+
+You should choose to import the key when asked.
+
+Note that, being the default, the OpenPGP keyserver is often overloaded and might respond with
+errors or timeouts. Many of the release managers have also uploaded their keys to the
+[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve keys from there.
+
+```shell script
+gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+```
+
+Once you have the keys, the signatures can be verified by running this:
+
+```shell script
+for i in *.asc
+do
+   echo "Checking $i"; gpg --verify `basename $i .asc`
+done
+```
+
+This should produce results similar to the below. The "Good signature from ..." is an indication
+that the signatures are correct. Do not worry about the "not certified with a trusted signature"
+warning. Most of the certificates used by release managers are self-signed, and that's why you get this
+warning. By importing the key from the keyserver in the previous step, or by its ID from the
+[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you know that
+this is a valid key already.
+
+```
+Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
+gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
+gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
+gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
+gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
+gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
+gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+```
+
+## SHA512 sum check
+
+Run this:
+
+```shell script
+for i in *.sha512
+do
+    echo "Checking $i"; shasum -a 512 `basename $i .sha512 ` | diff - $i
+done
+```
+
+You should get output similar to:
+
+```
+Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
+Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
+Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
+```
+
+# Verify release candidates by Contributors
+
+This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
+actual users of Apache Airflow test it in their own staging/test installations. Each release candidate
+is available on PyPI apart from the SVN packages, so everyone should be able to install
+the release candidate version of Airflow simply via the command below (<VERSION> is 1.10.12 for example,
+and <X> is the release candidate number: 1, 2, 3, ...).
+
+```shell script
+pip install apache-airflow==<VERSION>rc<X>
+```
+
+Optionally, you can install it with the constraints file:
+
+```shell script
+pip install apache-airflow==<VERSION>rc<X> \
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.6.txt"
+```
+
+Note that the constraints file is specific to the Python version that you are installing with.
+
+You can use any of the installation methods you prefer (you can even install it via the binary wheel
+downloaded from the SVN).
+
+There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
+Running the following command will use tmux inside breeze, create an `admin` user and run the Webserver & Scheduler:
+
+```shell script
+./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres
+```
+
+For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
+
+```shell script
+./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres --no-rbac-ui
+```
+
+Once you install and run Airflow, you should perform any verification you see as necessary to check
+that Airflow works as you expect.
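+
+For example, a very basic smoke test could look like this (a sketch only - extend it with the
+checks that matter for your own deployment):
+
+```shell script
+airflow version       # confirm the installed version is the RC
+airflow initdb        # 1.10.x: initialize the metadata database
+airflow list_dags     # 1.10.x: parse and list the available DAGs
+```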
+
+# Publish the final Apache Airflow release
+
+## Summarize the voting for the Apache Airflow release
+
+Once the vote has been passed, you will need to send a result vote to dev@airflow.apache.org:
+
+Subject:
+
+```
+[RESULT][VOTE] Airflow 1.10.2rc3
+```
+
+Message:
+
+```
+Hello,
+
+Apache Airflow 1.10.2 (based on RC3) has been accepted.
+
+4 “+1” binding votes received:
+- Kaxil Naik  (binding)
+- Bolke de Bruin (binding)
+- Ash Berlin-Taylor (binding)
+- Tao Feng (binding)
+
+
+4 "+1" non-binding votes received:
+
+- Deng Xiaodong (non-binding)
+- Stefan Seelmann (non-binding)
+- Joshua Patchus (non-binding)
+- Felix Uellendall (non-binding)
+
+Vote thread:
+https://lists.apache.org/thread.html/736404ca3d2b2143b296d0910630b9bd0f8b56a0c54e3a05f4c8b5fe@%3Cdev.airflow.apache.org%3E
+
+I'll continue with the release process, and the release announcement will follow shortly.
+
+Cheers,
+<your name>
+```
+
+
+## Publish release to SVN
+
+You need to migrate the RC artifacts that passed to this repository:
+https://dist.apache.org/repos/dist/release/airflow/
+(The migration should include renaming the files so that they no longer have the RC number in their filenames.)
+
+The best way of doing this is to svn cp between the two repos (this avoids having to upload the binaries again, and gives a clearer history in the svn commit logs):
+
+```shell script
+# First clone the repo
+export RC=1.10.4rc5
+export VERSION=${RC/rc?/}
+svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
+
+# Create new folder for the release
+cd airflow-release
+svn mkdir ${VERSION}
+cd ${VERSION}
+
+# Move the artifacts to svn folder & commit
+for f in ../../airflow-dev/$RC/*; do base=$(basename $f); svn cp $f ${base/rc?/}; done
+svn commit -m "Release Airflow ${VERSION} from ${RC}"
+
+# Remove old release
+# http://www.apache.org/legal/release-policy.html#when-to-archive
+cd ..
+export PREVIOUS_VERSION=1.10.1
+svn rm ${PREVIOUS_VERSION}
+svn commit -m "Remove old release: ${PREVIOUS_VERSION}"
+```
+
+Verify that the packages appear in [airflow](https://dist.apache.org/repos/dist/release/airflow/)
+
+## Prepare PyPI "release" packages
+
+At this point we release an official package:
+
+- Build the package:
+
+    ```shell script
+    python setup.py compile_assets sdist bdist_wheel
+    ```
+
+- Verify the artifacts that would be uploaded:
+
+    ```shell script
+    twine check dist/*
+    ```
+
+- Upload the package to PyPI's test environment:
+
+    ```shell script
+    twine upload -r pypitest dist/*
+    ```
+
+- Verify that the test package looks good by downloading it and installing it into a virtual environment.
+    The package download link is available at: https://test.pypi.org/project/apache-airflow/#files
+
+- Upload the package to PyPI's production environment:
+
+    ```shell script
+    twine upload -r pypi dist/*
+    ```
+
+- Again, confirm that the package is available here: https://pypi.python.org/pypi/apache-airflow
+
+## Update CHANGELOG.md
+
+- Get a diff between the last version and the current version:
+
+    ```shell script
+    git log 1.8.0..1.9.0 --pretty=oneline
+    ```
+
+- Update CHANGELOG.md with the details, and commit it.
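+
+    For example (a sketch, assuming you edited the file in place):
+
+    ```shell script
+    git add CHANGELOG.md
+    git commit -m "Update CHANGELOG.md for Airflow ${VERSION}"
+    ```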
+
+- Re-Tag & Push the constraints files with the final release version.
+
+    ```shell script
+    git checkout constraints-${RC}
+    git tag -s "constraints-${VERSION}"
+    git push origin "constraints-${VERSION}"
+    ```
+
+- Push Tag for the final version
+
+    This step should only be done now and not before, because it triggers an automated build of
+    the production docker image, using the packages that are currently released in PyPI
+    (both airflow and latest provider packages).
+
+    ```shell script
+    git push origin ${VERSION}
+    ```
+
+## \[Optional\] - Manually prepare production Docker Image
+
+Production Docker images should be automatically built in 2-3 hours after the release tag has been
+pushed. If this does not happen, please log in to DockerHub and check the status of the builds:
+[Build Timeline](https://hub.docker.com/repository/docker/apache/airflow/timeline)
+
+If you need to, you can also build and push the images manually:
+
+Airflow 2+:
+
+```shell script
+export DOCKER_REPO=docker.io/apache/airflow
+for python_version in "3.6" "3.7" "3.8"
+do
+  (
+    export DOCKER_TAG=${VERSION}-python${python_version}
+    ./scripts/ci/images/ci_build_dockerhub.sh
+  )
+done
+```
+
+This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean".
+
+
+Airflow 1.10:
+
+```shell script
+for python_version in "2.7" "3.5" "3.6" "3.7" "3.8"
+do
+    ./breeze build-image --production-image --python ${python_version} \
+        --image-tag apache/airflow:${VERSION}-python${python_version} --build-cache-local
+    docker push apache/airflow:${VERSION}-python${python_version}
+done
+docker tag apache/airflow:${VERSION}-python3.6 apache/airflow:${VERSION}
+docker push apache/airflow:${VERSION}
+```
+
+## Publish documentation
+
+Documentation is an essential part of the product and should be made available to users.
+In our case, documentation for the released versions is published in a separate repository -
+[`apache/airflow-site`](https://github.com/apache/airflow-site) - but the documentation source code and
+build tools are available in the `apache/airflow` repository, so you have to coordinate between
+the two repositories to be able to build the documentation.
+
+Documentation for Apache Airflow can be found in the ``/docs/apache-airflow`` directory.
+
+- First, clone the airflow-site repository and set the environment variable ``AIRFLOW_SITE_DIRECTORY``.
+
+    ```shell script
+    git clone https://github.com/apache/airflow-site.git airflow-site
+    cd airflow-site
+    export AIRFLOW_SITE_DIRECTORY="$(pwd)"
+    ```
+
+- Then you can go to the directory and build the necessary documentation packages
+
+    ```shell script
+    cd "${AIRFLOW_REPO_ROOT}"
+    ./breeze build-docs -- --package-filter apache-airflow --for-production
+    ```
+
+- Now you can preview the documentation.
+
+    ```shell script
+    ./docs/start_doc_server.sh
+    ```
+
+- Copy the documentation to the ``airflow-site`` repository, create a commit and push the changes.
+
+    ```shell script
+    ./docs/publish_docs.py --package apache-airflow
+    cd "${AIRFLOW_SITE_DIRECTORY}"
+    git commit -m "Add documentation for Apache Airflow ${VERSION}"
+    git push
+    ```
+
+## Notify developers of release
+
+- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
+the artifacts have been published:
+
+Subject:
+
+```shell script
+cat <<EOF
+Airflow ${VERSION} is released
+EOF
+```
+
+Body:
+
+```shell script
+cat <<EOF
+Dear Airflow community,
+
+I'm happy to announce that Airflow ${VERSION} was just released.
+
+The source release, as well as the binary "sdist" release, are available
+here:
+
+https://dist.apache.org/repos/dist/release/airflow/${VERSION}/
+
+We also made this version available on PyPI for convenience (`pip install apache-airflow`):
+
+https://pypi.python.org/pypi/apache-airflow
+
+The documentation is available on:
+https://airflow.apache.org/
+https://airflow.apache.org/docs/apache-airflow/${VERSION}/
+
+Find the CHANGELOG here for more details:
+
+https://airflow.apache.org/changelog.html#airflow-1-10-2-2019-01-19
+
+Cheers,
+<your name>
+EOF
+```
+
+## Update Announcements page
+
+Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index 335e713..b0ffa02 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -51,7 +51,7 @@ does not contain ``build-essential``. If you need compiler like gcc or g++ or ma
 are not found in the image and it is recommended that you follow the "customize" route instead.
 
 How to extend the image - it is something you are most likely familiar with - simply
-build a new image using Dockerfile's ``FROM:`` directive and add whatever you need. Then you can add your
+build a new image using Dockerfile's ``FROM`` directive and add whatever you need. Then you can add your
 Debian dependencies with ``apt`` or PyPI dependencies with ``pip install`` or any other stuff you need.
 
 You should be aware of a few things:
@@ -64,7 +64,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   USER root
   RUN apt-get update \
     && apt-get install -y --no-install-recommends \
@@ -81,7 +81,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   RUN pip install --no-cache-dir --user my-awesome-pip-dependency-to-add
 
 
@@ -92,7 +92,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   USER root
   RUN apt-get update \
     && apt-get install -y --no-install-recommends \
@@ -116,7 +116,7 @@ suited to prepare optimized production images.
 The advantage of this method is that it produces an optimized image even if you need some compile-time
 dependencies that are not needed in the final image. You need to use Airflow Sources to build such images
 from the `official distribution folder of Apache Airflow <https://downloads.apache.org/airflow/>`_ for the
-released versions, or checked out from the Github project if you happen to do it from git sources.
+released versions, or checked out from the GitHub project if you happen to do it from git sources.
 
 The easiest way to build the image is to use the ``breeze`` script, but you can also build such a customized
 image by running an appropriately crafted docker build in which you specify all the ``build-args``
@@ -133,16 +133,16 @@ additional apt dev and runtime dependencies.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
-    --build-arg ADDITIONAL_PYTHON_DEPS="pandas"
-    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++"
-    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
+    --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
+    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless" \
     --tag my-image
 
 
@@ -166,7 +166,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
   docker build . -f Dockerfile \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -236,7 +236,7 @@ Building the image (after copying the files downloaded to the "docker-context-fi
 
   ./breeze build-image \
       --production-image --python 3.7 --install-airflow-version=1.10.14 \
-      --disable-mysql-client-installation --disable-pip-cache --add-local-pip-wheels \
+      --disable-mysql-client-installation --disable-pip-cache --install-from-local-files-when-building \
       --constraints-location="/docker-context-files/constraints-1-10.txt"
 
 or
@@ -246,7 +246,7 @@ or
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -254,7 +254,7 @@ or
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg INSTALL_MYSQL_CLIENT="false" \
     --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
-    --build-arg AIRFLOW_LOCAL_PIP_WHEELS="true" \
+    --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
     --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-1-10.txt"
 
 
@@ -263,7 +263,7 @@ Customizing & extending the image together
 
 You can combine both - customizing & extending the image. You can build the image first using
 ``customize`` method (either with the docker command or with ``breeze``) and then you can ``extend``
-the resulting image using ``FROM:`` any dependencies you want.
+the resulting image using ``FROM`` and adding any dependencies you want.
 
 Customizing PyPI installation
 .............................
@@ -389,102 +389,116 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | Build argument                           | Default value                            | Description                              |
 +==========================================+==========================================+==========================================+
-| ``PYTHON_BASE_IMAGE``                    | ``python:3.6-slim-buster``               | Base python image                        |
+| ``PYTHON_BASE_IMAGE``                    | ``python:3.6-slim-buster``               | Base python image.                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``PYTHON_MAJOR_MINOR_VERSION``           | ``3.6``                                  | major/minor version of Python (should    |
-|                                          |                                          | match base image)                        |
+|                                          |                                          | match base image).                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_VERSION``                      | ``2.0.0.dev0``                           | version of Airflow                       |
+| ``AIRFLOW_VERSION``                      | ``2.0.0.dev0``                           | version of Airflow.                      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
-|                                          |                                          | dependencies are pre-installed           |
+|                                          |                                          | dependencies are pre-installed.          |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
-|                                          |                                          | are pre-installed initially              |
+|                                          |                                          | are pre-installed initially.             |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_CONSTRAINTS_LOCATION``         |                                          | If not empty, it will override the       |
+|                                          |                                          | source of the constraints with the       |
+|                                          |                                          | specified URL or file. Note that the     |
+|                                          |                                          | file has to be in docker context so      |
+|                                          |                                          | it's best to place such file in          |
+|                                          |                                          | one of the folders included in           |
+|                                          |                                          | .dockerignore.                           |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | reference (branch or tag) from GitHub    |
-|                                          |                                          | repository from which constraints are    |
-|                                          |                                          | used. By default it is set to            |
-|                                          |                                          | ``constraints-master`` but can be        |
-|                                          |                                          | ``constraints-1-10`` for 1.10.* versions |
-|                                          |                                          | or it could point to specific version    |
-|                                          |                                          | for example ``constraints-1.10.14``      |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | Reference (branch or tag) from GitHub    |
+|                                          |                                          | where constraints file is taken from.    |
+|                                          |                                          | It can be ``constraints-master`` but     |
+|                                          |                                          | also can be ``constraints-1-10`` for     |
+|                                          |                                          | 1.10.* installation. In case of building |
+|                                          |                                          | specific version you want to point it    |
+|                                          |                                          | to specific tag, for example             |
+|                                          |                                          | ``constraints-1.10.14``.                 |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                         | Default extras with which airflow is     |
-|                                          |                                          | installed                                |
+|                                          |                                          | installed.                               |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_AIRFLOW_VIA_PIP``              | ``false``                                | If set to true, Airflow is installed via |
-|                                          |                                          | pip install. if you want to install      |
-|                                          |                                          | Airflow from externally provided binary  |
-|                                          |                                          | package you can set it to false, place   |
-|                                          |                                          | the package in ``docker-context-files``  |
-|                                          |                                          | and set ``AIRFLOW_LOCAL_PIP_WHEELS`` to  |
-|                                          |                                          | true. You have to also set to true the   |
+| ``INSTALL_FROM_PYPI``                    | ``true``                                 | If set to true, Airflow is installed     |
+|                                          |                                          | from PyPI. If you want to install        |
+|                                          |                                          | Airflow from self-build package          |
+|                                          |                                          | you can set it to false, put package in  |
+|                                          |                                          | ``docker-context-files`` and set         |
+|                                          |                                          | ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` to |
+|                                          |                                          | ``true``. For this you have to also keep |
 |                                          |                                          | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES`` flag |
+|                                          |                                          | set to ``false``.                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``true``                                 | Allows to pre-cache airflow PIP packages |
+| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``false``                                | Allows to pre-cache airflow PIP packages |
 |                                          |                                          | from the GitHub of Apache Airflow        |
 |                                          |                                          | This allows to optimize iterations for   |
-|                                          |                                          | Image builds and speeds up CI builds     |
-|                                          |                                          | But in some corporate environments it    |
-|                                          |                                          | might be forbidden to download anything  |
-|                                          |                                          | from public repositories.                |
+|                                          |                                          | Image builds and speeds up CI builds.    |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_LOCAL_PIP_WHEELS``             | ``false``                                | If set to true, Airflow and it's         |
-|                                          |                                          | dependencies are installed during build  |
-|                                          |                                          | from locally downloaded .whl             |
-|                                          |                                          | files placed in the                      |
-|                                          |                                          | ``docker-context-files``.                |
+| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                                | If set to true, Airflow, providers and   |
+|                                          |                                          | all dependencies are installed           |
+|                                          |                                          | from locally built/downloaded            |
+|                                          |                                          | .whl and .tar.gz files placed in the     |
+|                                          |                                          | ``docker-context-files``. In certain     |
+|                                          |                                          | corporate environments, this is required |
+|                                          |                                          | to install airflow from such pre-vetted  |
+|                                          |                                          | packages rather than from PyPI. For this |
+|                                          |                                          | to work, also set ``INSTALL_FROM_PYPI``  |
+|                                          |                                          | to false.                                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                          | Optional additional extras with which    |
-|                                          |                                          | airflow is installed                     |
+|                                          |                                          | airflow is installed.                    |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_PYTHON_DEPS``               |                                          | Optional python packages to extend       |
-|                                          |                                          | the image with some extra dependencies   |
+|                                          |                                          | the image with some extra dependencies.  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``DEV_APT_COMMAND``                      | (see Dockerfile)                         | Dev apt command executed before dev deps |
-|                                          |                                          | are installed in the Build image         |
+|                                          |                                          | are installed in the Build image.        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_COMMAND``           |                                          | Additional Dev apt command executed      |
 |                                          |                                          | before dev dep are installed             |
-|                                          |                                          | in the Build image. Should start with && |
+|                                          |                                          | in the Build image. Should start with    |
+|                                          |                                          | ``&&``.                                  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``DEV_APT_DEPS``                         | (see Dockerfile)                         | Dev APT dependencies installed           |
-|                                          |                                          | in the Build image                       |
+|                                          |                                          | in the Build image.                      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_DEPS``              |                                          | Additional apt dev dependencies          |
-|                                          |                                          | installed in the Build image             |
+|                                          |                                          | installed in the Build image.            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_ENV``               |                                          | Additional env variables defined         |
-|                                          |                                          | when installing dev deps                 |
+|                                          |                                          | when installing dev deps.                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``RUNTIME_APT_COMMAND``                  | (see Dockerfile)                         | Runtime apt command executed before deps |
-|                                          |                                          | are installed in the Main image          |
+|                                          |                                          | are installed in the Main image.         |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_COMMAND``       |                                          | Additional Runtime apt command executed  |
 |                                          | before runtime deps are installed        |
-|                                          |                                          | in the Main image. Should start with &&  |
+|                                          |                                          | in the Main image. Should start with     |
+|                                          |                                          | ``&&``.                                  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``RUNTIME_APT_DEPS``                     | (see Dockerfile)                         | Runtime APT dependencies installed       |
-|                                          |                                          | in the Main image                        |
+|                                          |                                          | in the Main image.                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_DEPS``          |                                          | Additional apt runtime dependencies      |
-|                                          |                                          | installed in the Main image              |
+|                                          |                                          | installed in the Main image.             |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_ENV``           |                                          | Additional env variables defined         |
-|                                          |                                          | when installing runtime deps             |
+|                                          |                                          | when installing runtime deps.            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_HOME``                         | ``/opt/airflow``                         | Airflow’s HOME (that’s where logs and    |
-|                                          |                                          | sqlite databases are stored)             |
+|                                          |                                          | sqlite databases are stored).            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_UID``                          | ``50000``                                | Airflow user UID                         |
+| ``AIRFLOW_UID``                          | ``50000``                                | Airflow user UID.                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_GID``                          | ``50000``                                | Airflow group GID. Note that most files  |
 |                                          |                                          | created on behalf of airflow user belong |
 |                                          |                                          | to the ``root`` group (0) to keep        |
-|                                          |                                          | OpenShift Guidelines compatibility       |
+|                                          |                                          | OpenShift Guidelines compatibility.      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                        | Home directory of the Airflow user       |
+| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                        | Home directory of the Airflow user.      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``CASS_DRIVER_BUILD_CONCURRENCY``        | ``8``                                    | Number of processors to use for          |
 |                                          |                                          | cassandra PIP install (speeds up         |
@@ -493,7 +507,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``INSTALL_MYSQL_CLIENT``                 | ``true``                                 | Whether the MySQL client is installed.   |
 |                                          |                                          | The mysql extra is removed from extras   |
-|                                          |                                          | if the client is not installed           |
+|                                          |                                          | if the client is not installed.          |
 +------------------------------------------+------------------------------------------+------------------------------------------+
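+
+For example, to build the image without the MySQL client, you might use (a minimal
+sketch; all other build arguments keep their defaults):
+
+.. code-block:: bash
+
+  docker build . --build-arg INSTALL_MYSQL_CLIENT="false"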
 
 There are build arguments that determine the installation mechanism of Apache Airflow for the
@@ -503,59 +517,33 @@ production image. There are three types of build:
 * You can build the image from a released PyPI Airflow package (used to build the official Docker image)
 * You can build the image from any version in the GitHub repository (this is used mostly for system testing).
 
-+-----------------------------------+-----------------------------------+
-| Build argument                    | What to specify                   |
-+===================================+===================================+
-| ``AIRFLOW_INSTALL_SOURCES``       | Should point to the sources of    |
-|                                   | of Apache Airflow. It can be      |
-|                                   | either "." for installation from  |
-|                                   | local sources, "apache-airflow"   |
-|                                   | for installation from packages    |
-|                                   | and URL to installation from      |
-|                                   | GitHub repository (see below)     |
-|                                   | to install from any GitHub        |
-|                                   | version                           |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_INSTALL_VERSION``       | Optional - might be used for      |
-|                                   | package installation case to      |
-|                                   | set Airflow version for example   |
-|                                   | "==1.10.14". Remember to also     |
-|                                   | Set ``AIRFLOW_VERSION``           |
-|                                   | when you use it.                  |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | reference (branch or tag) from    |
-|                                   | GitHub where constraints file     |
-|                                   | is taken from. By default it is   |
-|                                   | ``constraints-master`` but can be |
-|                                   | ``constraints-1-10`` for 1.10.*   |
-|                                   | constraint or if you want to      |
-|                                   | point to specific version         |
-|                                   | might be ``constraints-1.10.14``  |
-+-----------------------------------+-----------------------------------+
-| ``SLUGIFY_USES_TEXT_UNIDECODE``   | In case of of installing airflow  |
-|                                   | 1.10.2 or 1.10.1 you need to      |
-|                                   | set this arg to ``yes``.          |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_WWW``                   | In case of Airflow 2.0 it should  |
-|                                   | be "www", in case of Airflow 1.10 |
-|                                   | series it should be "www_rbac".   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_SOURCES_FROM``          | Sources of Airflow. Set it to     |
-|                                   | "empty" to avoid costly           |
-|                                   | Docker context copying            |
-|                                   | in case of installation from      |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_SOURCES_TO``            | Target for Airflow sources. Set   |
-|                                   | to "/empty" to avoid costly       |
-|                                   | Docker context copying            |
-|                                   | in case of installation from      |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| Build argument                    | Default                | What to specify                                                                   |
++===================================+========================+===================================================================================+
+| ``AIRFLOW_INSTALLATION_METHOD``   | ``apache-airflow``     | Should point to the installation method of Apache Airflow. It can be              |
+|                                   |                        | ``apache-airflow`` for installation from packages, a URL to install from a        |
+|                                   |                        | GitHub repository tag or branch, or "." to install from sources.                  |
+|                                   |                        | Note that installing from local sources requires appropriate values of the        |
+|                                   |                        | ``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described below. |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_INSTALL_VERSION``       |                        | Optional - might be used for package installation of a different Airflow version, |
+|                                   |                        | for example "==1.10.14". For consistency, you should also set ``AIRFLOW_VERSION`` |
+|                                   |                        | to the same value; ``AIRFLOW_VERSION`` is embedded as a label in the image.       |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | ``constraints-master`` | Reference (branch or tag) in GitHub from which the constraints file is taken.     |
+|                                   |                        | It can be ``constraints-master`` but can also be ``constraints-1-10`` for         |
+|                                   |                        | 1.10.* installations. In case of building a specific version,                     |
+|                                   |                        | you should point it to a specific tag, for example ``constraints-1.10.14``.       |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_WWW``                   | ``www``                | In case of Airflow 2.0 it should be "www", in case of Airflow 1.10                |
+|                                   |                        | series it should be "www_rbac".                                                   |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_SOURCES_FROM``          | ``empty``              | Sources of Airflow. Set it to "." when you install airflow from                   |
+|                                   |                        | local sources.                                                                    |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_SOURCES_TO``            | ``/empty``             | Target for Airflow sources. Set to "/opt/airflow" when                            |
+|                                   |                        | you want to install airflow from local sources.                                   |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
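+
+For instance, installation from local sources combines these arguments as follows
+(a minimal sketch; fuller examples with all arguments follow below):
+
+.. code-block:: bash
+
+  docker build . \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="." \
+    --build-arg AIRFLOW_SOURCES_FROM="." \
+    --build-arg AIRFLOW_SOURCES_TO="/opt/airflow"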
 
 This builds the production image for Python 3.6 with default extras from the local sources (currently
 the master version of 2.0):
@@ -572,7 +560,7 @@ constraints taken from constraints-1-10-12 branch in GitHub.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="https://github.com/apache/airflow/archive/1.10.14.tar.gz#egg=apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/1.10.14.tar.gz#egg=apache-airflow" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -587,7 +575,7 @@ of v1-10-test branch.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
@@ -603,14 +591,14 @@ additional python dependencies and pre-installed pip dependencies from 1.10.14 t
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.14" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
     --build-arg ADDITIONAL_PYTHON_DEPS="sshtunnel oauth2client"
 
 This builds the production image for Python 3.7 with additional Airflow extras from the 1.10.14 PyPI package and
@@ -621,14 +609,14 @@ additional apt dev and runtime dependencies.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
-    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
     --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"
 
 
diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index a0ad0e6..e5e230b 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -49,46 +49,83 @@ echo "DOCKER_TAG=${DOCKER_TAG}"
 echo "Detected PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}"
 echo
 
-(
-    export INSTALL_FROM_PYPI="true"
-    export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
-    export INSTALL_PROVIDERS_FROM_SOURCES="true"
-    export AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
-    export DOCKER_CACHE="pulled"
-    # shellcheck source=scripts/ci/libraries/_script_init.sh
-    . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
-
+if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
     echo
-    echo "Building and pushing CI image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+    echo "Building airflow from branch or non-release tag: ${DOCKER_TAG}"
     echo
-    rm -rf "${BUILD_CACHE_DIR}"
-    build_images::prepare_ci_build
-    build_images::rebuild_ci_image_if_needed
-    if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
-        # Do not push if we are building a tagged version
-        push_pull_remove_images::push_ci_images
-    fi
-)
+    # Only build and push the CI image for the nightly-master, v1-10-test and v2-0-test branches.
+    # For tagged releases we build everything from PyPI, so we do not need CI images.
+    # For development images, we have to build all packages from current sources because we want to
+    # produce the "latest and greatest" image from those branches. We need to build and push the CI
+    # image as well as the PROD image, but we have to build the CI image first, in order to use it
+    # to prepare the provider packages. The CI image provides an environment where we can
+    # reproducibly download the right .whl packages and build the provider packages, and then build
+    # the production image using those prepared .whl packages. This is as close as it can get to
+    # production images - everything is built from packages, but not from PyPI - those packages are
+    # built locally using the latest sources!
 
-(
-    export INSTALL_FROM_PYPI="false"
-    export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+    # Note - we need sub-processes here, because we can run _script_init.sh only once per process
+    # and it determines how to build the image - since we are building two images here
+    # we need to run those in sub-processes
+    (
+        export INSTALL_FROM_PYPI="true"
+        export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
+        export INSTALL_PROVIDERS_FROM_SOURCES="true"
+        export AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
+        export DOCKER_CACHE="pulled"
+        # shellcheck source=scripts/ci/libraries/_script_init.sh
+        . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+        echo
+        echo "Building and pushing CI image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+        echo
+        rm -rf "${BUILD_CACHE_DIR}"
+        rm -rf "${AIRFLOW_SOURCES}/docker-context-files"/*
+        build_images::prepare_ci_build
+        build_images::rebuild_ci_image_if_needed
+        push_pull_remove_images::push_ci_images
+    )
+    (
+        export INSTALL_FROM_PYPI="false"
+        export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+        export INSTALL_PROVIDERS_FROM_SOURCES="false"
+        export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+        export DOCKER_CACHE="pulled"
+        # shellcheck source=scripts/ci/libraries/_script_init.sh
+        . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+        echo
+        echo "Building and pushing PROD image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+        echo
+        rm -rf "${BUILD_CACHE_DIR}"
+        rm -rf "${AIRFLOW_SOURCES}/docker-context-files"/*
+        build_images::prepare_prod_build
+        build_images::build_prod_images_from_packages
+        push_pull_remove_images::push_prod_images
+    )
+else
+    echo
+    echo "Building airflow from release tag: ${DOCKER_TAG}"
+    echo
+    # This is an image built from the "release" tag (either RC or final one).
+    # In this case all packages are taken from PyPI rather than from locally built sources
+    export INSTALL_FROM_PYPI="true"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
     export INSTALL_PROVIDERS_FROM_SOURCES="false"
     export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
-    export DOCKER_CACHE="pulled"
+    export DOCKER_CACHE="local"
+    # Name the image based on the TAG rather than based on the branch name
+    export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
+    export INSTALL_AIRFLOW_VERSION="${DOCKER_TAG%-python*}"
+    # INSTALL_AIRFLOW_VERSION has to be set before it is used to derive the constraints reference
+    export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
+    export AIRFLOW_SOURCES_FROM="empty"
+    export AIRFLOW_SOURCES_TO="/empty"
 
-    if [[ "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
-        # Disable cache and set name of the tag as image name if we are building a tagged version
-        export DOCKER_CACHE="disabled"
-        export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
-    fi
     # shellcheck source=scripts/ci/libraries/_script_init.sh
     . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
     echo
     echo "Building and pushing PROD image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
     echo
     rm -rf "${BUILD_CACHE_DIR}"
+    rm -rf "${AIRFLOW_SOURCES}/docker-context-files"/*
     build_images::prepare_prod_build
-    build_images::build_prod_images_from_packages
+    build_images::build_prod_images
     push_pull_remove_images::push_prod_images
-)
+fi
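+
+# Illustrative DOCKER_TAG values for the gating above (an editorial sketch, not used by the script):
+#   DOCKER_TAG="master-python3.6"     -> branch build: CI image plus PROD image from locally built packages
+#   DOCKER_TAG="1.10.14-python3.6"    -> release build: PROD image installed from PyPI only
+#   DOCKER_TAG="1.10.14rc1-python3.6" -> release candidate: also matches ^[0-9] and is built from PyPI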
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 8f48c16..296124f 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -30,34 +30,53 @@ function build_images::add_build_args_for_remote_install() {
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
             "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}"
         )
-    fi
-    if [[ "${AIRFLOW_CONSTRAINTS_LOCATION}" != "" ]]; then
-        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
-        )
-    fi
-    if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
-        # All types of references/versions match this regexp for 1.10 series
-        # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} matches last
-        # minor digit of version and it's length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
-        AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
-        if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
-            # For v1_10_* branches use constraints-1-10 branch
+    else
+        if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
+            # All types of references/versions match this regexp for 1.10 series
+            # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} matches last
+            # minor digit of the version and its length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
+            AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
+            if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
+                # For v1_10_* branches use constraints-1-10 branch
+                EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
+                    "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10"
+                )
+            else
+                EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
+                    # For specified minor version of 1.10 or v1 branch use specific reference constraints
+                    "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
+                )
+            fi
+        elif [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10"
+                # For specified minor version of 2.0 or v2 branch use specific reference constraints
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
             )
         else
+            # For all other versions we just get the default constraint branch coming from _initialization.sh
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                # For specified minor version of 1.10 use specific reference constraints
-                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
             )
         fi
-        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
-    else
-        # For all other (master, 2.0+) we just get the default constraint branch
+    fi
+    if [[ "${AIRFLOW_CONSTRAINTS_LOCATION}" != "" ]]; then
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
+            "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
         )
+    fi
+    # Depending on the version built, we choose the right branch to preload the packages from.
+    # For v1-10-test builds we choose v1-10-test and for v2-0-test builds we choose v2-0-test.
+    # All other builds of a specific version (1.10 or 2.0 series) choose the corresponding stable
+    # branch to preload from. All remaining builds use the default branch defined in _initialization.sh
+    if [[ ${AIRFLOW_VERSION} == 'v1-10-test' ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
+    elif [[ ${AIRFLOW_VERSION} =~ v?1.* ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-stable"
+    elif [[ ${AIRFLOW_VERSION} == 'v2-0-test' ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v2-0-test"
+    elif [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v2-0-stable"
+    else
         AIRFLOW_BRANCH_FOR_PYPI_PRELOADING=${DEFAULT_BRANCH}
     fi
 }
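+# Examples of the mapping above (an editorial sketch; the constraints part applies only when
+# no explicit AIRFLOW_CONSTRAINTS_REFERENCE is passed in):
+#   AIRFLOW_VERSION="v1-10-test" -> constraints-1-10,    preload packages from v1-10-test
+#   AIRFLOW_VERSION="1.10.14"    -> constraints-1.10.14, preload packages from v1-10-stable
+#   AIRFLOW_VERSION="2.0.0"      -> constraints-2.0.0,   preload packages from v2-0-stable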
@@ -638,23 +657,18 @@ function build_images::prepare_prod_build() {
     if [[ -n "${INSTALL_AIRFLOW_REFERENCE=}" ]]; then
         # When --install-airflow-reference is used then the image is built from a GitHub tag
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
-            "--build-arg" "AIRFLOW_INSTALL_SOURCES=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
+            "--build-arg" "AIRFLOW_INSTALLATION_METHOD=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_REFERENCE}"
         build_images::add_build_args_for_remote_install
     elif [[ -n "${INSTALL_AIRFLOW_VERSION=}" ]]; then
         # When --install-airflow-version is used then the image is built from a PIP package
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
-            "--build-arg" "AIRFLOW_INSTALL_SOURCES=apache-airflow"
+            "--build-arg" "AIRFLOW_INSTALLATION_METHOD=apache-airflow"
             "--build-arg" "AIRFLOW_INSTALL_VERSION===${INSTALL_AIRFLOW_VERSION}"
             "--build-arg" "AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION}"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
-        if [[ ${AIRFLOW_VERSION} == "1.10.2" || ${AIRFLOW_VERSION} == "1.10.1" ]]; then
-            EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                "--build-arg" "SLUGIFY_USES_TEXT_UNIDECODE=yes"
-            )
-        fi
         build_images::add_build_args_for_remote_install
     else
         # When no airflow version/reference is specified, production image is built from local sources
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 5f2ec6d..ff7abe9 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -148,11 +148,25 @@ function initialization::initialize_base_variables() {
     export INSTALLED_EXTRAS="async,amazon,celery,kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,google,azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
     readonly INSTALLED_EXTRAS
 
-    PIP_VERSION="20.2.4"
+    # Default version of pip used (it has to be < 20.3 until https://github.com/apache/airflow/issues/12838 is solved)
+    PIP_VERSION=${PIP_VERSION:="20.2.4"}
     export PIP_VERSION
 
-    WHEEL_VERSION="0.35.1"
+    # We also pin the version of wheel to get consistent builds
+    WHEEL_VERSION=${WHEEL_VERSION:="0.36.1"}
     export WHEEL_VERSION
+
+    # By default, Airflow is installed from local sources when using Breeze/CI
+    AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM:="."}
+    export AIRFLOW_SOURCES_FROM
+
+    # The sources are copied to /opt/airflow by default (Breeze and CI)
+    AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO:="/opt/airflow"}
+    export AIRFLOW_SOURCES_TO
+
+    # And Airflow is installed from there (Breeze and CI)
+    AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION:="."}
+    export AIRFLOW_INSTALL_VERSION
 }
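+
+# Note (an editorial sketch): since the values above use the ${VAR:=default} pattern,
+# they can be overridden from the environment, for example:
+#   PIP_VERSION="20.2.4" WHEEL_VERSION="0.36.1" ./breeze build-image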
 
 # Determine current branch
@@ -461,6 +475,11 @@ function initialization::initialize_test_variables() {
     export TEST_TYPE=${TEST_TYPE:=""}
 }
 
+function initialization::initialize_package_variables() {
+    export PACKAGE_FORMAT=${PACKAGE_FORMAT:="wheel"}
+}
+
+
 function initialization::initialize_build_image_variables() {
     REMOTE_IMAGE_CONTAINER_ID_FILE="${AIRFLOW_SOURCES}/manifests/remote-airflow-manifest-image"
     LOCAL_IMAGE_BUILD_CACHE_HASH_FILE="${AIRFLOW_SOURCES}/manifests/local-build-cache-hash"
@@ -504,6 +523,7 @@ function initialization::initialize_common_environment() {
     initialization::initialize_git_variables
     initialization::initialize_github_variables
     initialization::initialize_test_variables
+    initialization::initialize_package_variables
     initialization::initialize_build_image_variables
 }
 
@@ -535,7 +555,6 @@ DockerHub variables:
 Mount variables:
 
     MOUNT_LOCAL_SOURCES: ${MOUNT_LOCAL_SOURCES}
-    MOUNT_FILES: ${MOUNT_FILES}
 
 Force variables:
 
@@ -597,15 +616,13 @@ Detected CI build environment:
     CI_BUILD_ID=${CI_BUILD_ID}
     CI_JOB_ID=${CI_JOB_ID}
     CI_EVENT_TYPE=${CI_EVENT_TYPE}
-    CI_SOURCE_REPO=${CI_SOURCE_REPO}
-    CI_SOURCE_BRANCH=${CI_SOURCE_BRANCH}
 
 Initialization variables:
 
     INIT_SCRIPT_FILE: ${INIT_SCRIPT_FILE=}
     LOAD_DEFAULT_CONNECTIONS: ${LOAD_DEFAULT_CONNECTIONS}
     LOAD_EXAMPLES: ${LOAD_EXAMPLES}
-    INSTALL_WHEELS: ${INSTALL_WHEELS=}
+    INSTALL_PACKAGES_FROM_DIST: ${INSTALL_PACKAGES_FROM_DIST=}
     DISABLE_RBAC: ${DISABLE_RBAC}
 
 Test variables:
@@ -629,30 +646,6 @@ function initialization::get_environment_for_builds_on_ci() {
         export CI_JOB_ID="${GITHUB_JOB}"
         export CI_EVENT_TYPE="${GITHUB_EVENT_NAME}"
         export CI_REF="${GITHUB_REF:=}"
-        if [[ ${CI_EVENT_TYPE:=} == "pull_request" ]]; then
-            # default name of the source repo (assuming it's forked without rename)
-            export SOURCE_AIRFLOW_REPO=${SOURCE_AIRFLOW_REPO:="airflow"}
-            # For Pull Requests it's ambiguous to find the PR and we need to
-            # assume that name of repo is airflow but it could be overridden in case it's not
-            export CI_SOURCE_REPO="${GITHUB_ACTOR}/${SOURCE_AIRFLOW_REPO}"
-            export CI_SOURCE_BRANCH="${GITHUB_HEAD_REF}"
-            BRANCH_EXISTS=$(git ls-remote --heads \
-                "https://github.com/${CI_SOURCE_REPO}.git" "${CI_SOURCE_BRANCH}" || true)
-            if [[ -z ${BRANCH_EXISTS=} ]]; then
-                verbosity::print_info
-                verbosity::print_info "https://github.com/${CI_SOURCE_REPO}.git Branch ${CI_SOURCE_BRANCH} does not exist"
-                verbosity::print_info
-                verbosity::print_info
-                verbosity::print_info "Fallback to https://github.com/${CI_TARGET_REPO}.git Branch ${CI_TARGET_BRANCH}"
-                verbosity::print_info
-                # Fallback to the target repository if the repo does not exist
-                export CI_SOURCE_REPO="${CI_TARGET_REPO}"
-                export CI_SOURCE_BRANCH="${CI_TARGET_BRANCH}"
-            fi
-        else
-            export CI_SOURCE_REPO="${CI_TARGET_REPO}"
-            export CI_SOURCE_BRANCH="${CI_TARGET_BRANCH}"
-        fi
     else
         # CI PR settings
         export CI_TARGET_REPO="${CI_TARGET_REPO="apache/airflow"}"
@@ -661,9 +654,6 @@ function initialization::get_environment_for_builds_on_ci() {
         export CI_JOB_ID="${CI_JOB_ID="0"}"
         export CI_EVENT_TYPE="${CI_EVENT_TYPE="pull_request"}"
         export CI_REF="${CI_REF="refs/head/master"}"
-
-        export CI_SOURCE_REPO="${CI_SOURCE_REPO="apache/airflow"}"
-        export CI_SOURCE_BRANCH="${DEFAULT_BRANCH="master"}"
     fi
 
     if [[ ${VERBOSE} == "true" && ${PRINT_INFO_FROM_SCRIPTS} == "true" ]]; then
@@ -726,8 +716,8 @@ function initialization::make_constants_read_only() {
     readonly IMAGE_TAG
 
     readonly AIRFLOW_PRE_CACHED_PIP_PACKAGES
-    readonly INSTALL_AIRFLOW_VIA_PIP
-    readonly AIRFLOW_LOCAL_PIP_WHEELS
+    readonly INSTALL_FROM_PYPI
+    readonly INSTALL_FROM_DOCKER_CONTEXT_FILES
     readonly AIRFLOW_CONSTRAINTS_REFERENCE
     readonly AIRFLOW_CONSTRAINTS_LOCATION
 
diff --git a/scripts/ci/libraries/_parameters.sh b/scripts/ci/libraries/_parameters.sh
index 566585e..7f15990 100644
--- a/scripts/ci/libraries/_parameters.sh
+++ b/scripts/ci/libraries/_parameters.sh
@@ -26,7 +26,6 @@ function parameters::save_to_file() {
     # shellcheck disable=SC2005
     echo "$(eval echo "\$$1")" >"${BUILD_CACHE_DIR}/.$1"
 }
-
 # check if parameter set for the variable is allowed (should be on the _breeze_allowed list)
 # parameters:
 # $1 - name of the variable
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index a5a827a..19cacfe 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -27,11 +27,10 @@
 function add_trap() {
     trap="${1}"
     shift
-    for signal in "${@}"
-    do
+    for signal in "${@}"; do
         # adding trap to exiting trap
         local handlers
-        handlers="$( trap -p "${signal}" | cut -f2 -d \' )"
+        handlers="$(trap -p "${signal}" | cut -f2 -d \')"
         # shellcheck disable=SC2064
         trap "${trap};${handlers}" "${signal}"
     done
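+    # Example usage (an editorial illustration; assumes this file is sourced):
+    #   add_trap "in_container_fix_ownership" EXIT HUP INT TERM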
@@ -52,7 +51,16 @@ function assert_in_container() {
 }
 
 function in_container_script_start() {
-    OUT_FILE_PRINTED_ON_ERROR=$(mktemp)
+    OUTPUT_PRINTED_ONLY_ON_ERROR=$(mktemp)
+    export OUTPUT_PRINTED_ONLY_ON_ERROR
+    readonly OUTPUT_PRINTED_ONLY_ON_ERROR
+
+    if [[ ${VERBOSE=} == "true" ]]; then
+        echo
+        echo "Output is redirected to ${OUTPUT_PRINTED_ONLY_ON_ERROR} and will be printed on error only"
+        echo
+    fi
+
     if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
         set -x
     fi
@@ -62,14 +70,14 @@ function in_container_script_end() {
     #shellcheck disable=2181
     EXIT_CODE=$?
     if [[ ${EXIT_CODE} != 0 ]]; then
-        if [[ "${PRINT_INFO_FROM_SCRIPTS=="true"}" == "true" ]] ;then
-            if [[ -f ${OUT_FILE_PRINTED_ON_ERROR} ]]; then
+        if [[ "${PRINT_INFO_FROM_SCRIPTS="true"}" == "true" ]]; then
+            if [[ -f "${OUTPUT_PRINTED_ONLY_ON_ERROR}" ]]; then
                 echo "###########################################################################################"
                 echo
                 echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container (See above for error message). Below is the output of the last action! ${COLOR_RESET}"
                 echo
                 echo "${COLOR_BLUE}***  BEGINNING OF THE LAST COMMAND OUTPUT *** ${COLOR_RESET}"
-                cat "${OUT_FILE_PRINTED_ON_ERROR}"
+                cat "${OUTPUT_PRINTED_ONLY_ON_ERROR}"
                 echo "${COLOR_BLUE}***  END OF THE LAST COMMAND OUTPUT ***  ${COLOR_RESET}"
                 echo
                 echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container. The actual error might be above the output!  ${COLOR_RESET}"
@@ -137,9 +145,9 @@ function in_container_fix_ownership() {
         if [[ ${VERBOSE} == "true" ]]; then
             echo "Fixing ownership of mounted files"
         fi
-        sudo find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null \
-            | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference ||
-                true >/dev/null 2>&1
+        sudo find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null |
+            sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference ||
+            true >/dev/null 2>&1
         if [[ ${VERBOSE} == "true" ]]; then
             echo "Fixed ownership of mounted files"
         fi
@@ -157,7 +165,7 @@ function in_container_clear_tmp() {
 }
 
 function in_container_go_to_airflow_sources() {
-    pushd "${AIRFLOW_SOURCES}"  &>/dev/null || exit 1
+    pushd "${AIRFLOW_SOURCES}" &>/dev/null || exit 1
 }
 
 function in_container_basic_sanity_check() {
@@ -176,7 +184,7 @@ function start_output_heartbeat() {
     echo "Starting output heartbeat"
     echo
 
-    bash 2> /dev/null <<EOF &
+    bash 2>/dev/null <<EOF &
 while true; do
   echo "\$(date): ${MESSAGE} "
   sleep ${INTERVAL}
@@ -187,41 +195,7 @@ EOF
 
 function stop_output_heartbeat() {
     kill "${HEARTBEAT_PID}" || true
-    wait "${HEARTBEAT_PID}" || true 2> /dev/null
-}
-
-function setup_kerberos() {
-    FQDN=$(hostname)
-    ADMIN="admin"
-    PASS="airflow"
-    KRB5_KTNAME=/etc/airflow.keytab
-
-    sudo cp "${AIRFLOW_SOURCES}/scripts/in_container/krb5/krb5.conf" /etc/krb5.conf
-
-    echo -e "${PASS}\n${PASS}" | \
-        sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "addprinc -randkey airflow/${FQDN}" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs/kadmin_1.log" >/dev/null
-    RES_1=$?
-
-    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs/kadmin_2.log" >/dev/null
-    RES_2=$?
-
-    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow/${FQDN}" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs``/kadmin_3.log" >/dev/null
-    RES_3=$?
-
-    if [[ ${RES_1} != 0 || ${RES_2} != 0 || ${RES_3} != 0 ]]; then
-        echo
-        echo "Error when setting up Kerberos: ${RES_1} ${RES_2} ${RES_3}}!"
-        echo
-        exit 1
-    else
-        echo
-        echo "Kerberos enabled and working."
-        echo
-        sudo chmod 0644 "${KRB5_KTNAME}"
-    fi
+    wait "${HEARTBEAT_PID}" 2>/dev/null || true
 }
 
 function dump_airflow_logs() {
@@ -237,17 +211,217 @@ function dump_airflow_logs() {
     echo "###########################################################################################"
 }
 
-function install_released_airflow_version() {
+function install_airflow_from_wheel() {
+    local extras
+    extras="${1}"
+    local airflow_package
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-*.whl')
+    if [[ -z "${airflow_package}" ]]; then
+        >&2 echo
+        >&2 echo "ERROR! Could not find airflow wheel package to install in dist"
+        >&2 echo
+        exit 4
+    fi
+    echo
+    echo "Found package: ${airflow_package}. Installing."
+    echo
+    pip install "${airflow_package}${extras}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
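+# Example usage (an editorial sketch): install the wheel found in /dist with the "all" extras:
+#   install_airflow_from_wheel "[all]"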
+
+function install_remaining_dependencies() {
+    pip install "apache-beam[gcp]" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function uninstall_airflow() {
+    echo
+    echo "Uninstalling airflow"
+    echo
     pip uninstall -y apache-airflow || true
+    echo
+    echo "Remove all AIRFLOW_HOME remnants"
+    echo
     find /root/airflow/ -type f -print0 | xargs -0 rm -f --
-    if [[ ${1} == "1.10.2" || ${1} == "1.10.1" ]]; then
-        export SLUGIFY_USES_TEXT_UNIDECODE=yes
+}
+
+function uninstall_providers() {
+    echo
+    echo "Uninstalling all provider packages"
+    echo
+    local provider_packages_to_uninstall
+    provider_packages_to_uninstall=$(pip freeze | grep apache-airflow-providers || true)
+    if [[ -n ${provider_packages_to_uninstall} ]]; then
+        echo "${provider_packages_to_uninstall}" | xargs pip uninstall -y || true 2>/dev/null
     fi
+}
+
+function uninstall_airflow_and_providers() {
+    uninstall_providers
+    uninstall_airflow
+}
+
+function install_all_airflow_dependencies() {
+    echo
+    echo "Installing dependencies from 'all' extras"
+    echo
+    pip install ".[all]" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function install_released_airflow_version() {
+    local version="${1}"
+    local extras="${2}"
+    echo
+    echo "Installing released ${version} version of airflow with extras ${extras}"
+    echo
+
     rm -rf "${AIRFLOW_SOURCES}"/*.egg-info
-    INSTALLS=("apache-airflow==${1}" "werkzeug<1.0.0")
-    pip install --upgrade "${INSTALLS[@]}"
+    pip install --upgrade "apache-airflow${extras}==${version}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function install_all_provider_packages_from_wheels() {
+    echo
+    echo "Installing all provider packages from wheels"
+    echo
+    pip install /dist/apache_airflow*providers_*.whl >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
 
+function install_all_provider_packages_from_tar_gz_files() {
+    echo
+    echo "Installing all provider packages from .tar.gz"
+    echo
+    pip install /dist/apache-airflow-*providers-*.tar.gz >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+
+function verify_suffix_versions_for_package_preparation() {
+    TARGET_VERSION_SUFFIX=""
+    FILE_VERSION_SUFFIX=""
+
+    VERSION_SUFFIX_FOR_PYPI=${VERSION_SUFFIX_FOR_PYPI:=""}
+    readonly VERSION_SUFFIX_FOR_PYPI
+
+    VERSION_SUFFIX_FOR_SVN=${VERSION_SUFFIX_FOR_SVN:=""}
+
+    if [[ ${VERSION_SUFFIX_FOR_PYPI} != "" ]]; then
+        echo
+        echo "Version suffix for PyPI = ${VERSION_SUFFIX_FOR_PYPI}"
+        echo
+    fi
+    if [[ ${VERSION_SUFFIX_FOR_SVN} != "" ]]; then
+        echo
+        echo "Version suffix for SVN  = ${VERSION_SUFFIX_FOR_SVN}"
+        echo
+    fi
+
+    if [[ ${VERSION_SUFFIX_FOR_SVN} =~ ^rc ]]; then
+        echo """
+${COLOR_YELLOW_WARNING} The version suffix for SVN is used only for file names.
+         The version inside the packages has no version suffix.
+         This way we can just rename files when they graduate to final release.
+${COLOR_RESET}
+"""
+        echo
+        echo "This suffix is added '${VERSION_SUFFIX_FOR_SVN}' "
+        echo
+        FILE_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_SVN}
+        VERSION_SUFFIX_FOR_SVN=""
+    fi
+    readonly FILE_VERSION_SUFFIX
+    readonly VERSION_SUFFIX_FOR_SVN
+
+    export FILE_VERSION_SUFFIX
+    export VERSION_SUFFIX_FOR_SVN
+    export VERSION_SUFFIX_FOR_PYPI
+
+    if [[ ${VERSION_SUFFIX_FOR_PYPI} != '' && ${VERSION_SUFFIX_FOR_SVN} != '' ]]; then
+        if [[ ${VERSION_SUFFIX_FOR_PYPI} != "${VERSION_SUFFIX_FOR_SVN}" ]]; then
+            echo
+            echo "${COLOR_RED_ERROR} If you specify both PyPI and SVN version suffixes they must match  ${COLOR_RESET}"
+            echo
+            echo "However they are different: PyPI:'${VERSION_SUFFIX_FOR_PYPI}' vs. SVN:'${VERSION_SUFFIX_FOR_SVN}'"
+            echo
+            exit 1
+        else
+            if [[ ${VERSION_SUFFIX_FOR_PYPI} =~ ^rc ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} If you prepare an RC candidate, you need to specify only PyPI suffix  ${COLOR_RESET}"
+                echo
+                echo "However you specified both: PyPI'${VERSION_SUFFIX_FOR_PYPI}' and SVN '${VERSION_SUFFIX_FOR_SVN}'"
+                echo
+                exit 2
+            fi
+            # Just use one of them - they are both the same:
+            TARGET_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_PYPI}
+        fi
+    else
+        if [[ ${VERSION_SUFFIX_FOR_PYPI} == '' && ${VERSION_SUFFIX_FOR_SVN} == '' ]]; then
+            # Preparing "official version"
+            TARGET_VERSION_SUFFIX=""
+        else
+
+            if [[ ${VERSION_SUFFIX_FOR_PYPI} == '' ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} You should never specify version for PyPI only.  ${COLOR_RESET}"
+                echo
+                echo "You specified PyPI suffix: '${VERSION_SUFFIX_FOR_PYPI}'"
+                echo
+                exit 3
+            fi
+            TARGET_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_PYPI}${VERSION_SUFFIX_FOR_SVN}
+            if [[ ! ${TARGET_VERSION_SUFFIX} =~ rc.* ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} If you prepare an alpha/beta release, you need to specify both PyPI/SVN suffixes and they have to match.  ${COLOR_RESET}"
+                echo
+                echo "And they have to match. You specified only one suffix:  ${TARGET_VERSION_SUFFIX}."
+                echo
+                exit 4
+            fi
+        fi
+    fi
+    readonly TARGET_VERSION_SUFFIX
+    export TARGET_VERSION_SUFFIX
+}
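+# Valid suffix combinations (an editorial sketch of the rules enforced above):
+#   VERSION_SUFFIX_FOR_PYPI="rc1", VERSION_SUFFIX_FOR_SVN=""    -> RC candidate, TARGET_VERSION_SUFFIX="rc1"
+#   VERSION_SUFFIX_FOR_PYPI="a1",  VERSION_SUFFIX_FOR_SVN="a1"  -> alpha release, TARGET_VERSION_SUFFIX="a1"
+#   VERSION_SUFFIX_FOR_SVN="rc1" alone                          -> rc1 used for file names only
+#   both suffixes empty                                         -> final release, TARGET_VERSION_SUFFIX=""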
+
+function filename_to_python_module() {
+    # Turn the file name into a python package name
+    file="$1"
+    no_leading_dotslash="${file#./}"
+    no_py="${no_leading_dotslash/.py/}"
+    no_init="${no_py/\/__init__/}"
+    echo "${no_init//\//.}"
+}
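+# For example (an editorial illustration):
+#   filename_to_python_module "./airflow/providers/google/__init__.py"   # -> airflow.providers.google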
+
+function import_all_provider_classes() {
+    echo
+    echo "Importing all provider classes"
+    echo
+
+    # We have to move to a directory where "airflow" is
+    unset PYTHONPATH
+    # We need to make sure we are not in the airflow checkout, otherwise it will automatically be added to the
+    # import path
+    cd /
+
+    declare -a IMPORT_CLASS_PARAMETERS
+
+    PROVIDER_PATHS=$(
+        python3 <<EOF 2>/dev/null
+import airflow.providers
+path=airflow.providers.__path__
+for p in path._path:
+    print(p)
+EOF
+    )
+    export PROVIDER_PATHS
+
+    echo "Searching for providers packages in:"
+    echo "${PROVIDER_PATHS}"
+
+    while read -r provider_path; do
+        IMPORT_CLASS_PARAMETERS+=("--path" "${provider_path}")
+    done < <(echo "${PROVIDER_PATHS}")
+
+    python3 /opt/airflow/dev/import_all_classes.py "${IMPORT_CLASS_PARAMETERS[@]}"
+}
 
 function in_container_set_colors() {
     COLOR_BLUE=$'\e[34m'