Posted to commits@airflow.apache.org by po...@apache.org on 2021/08/02 13:43:55 UTC

[airflow] branch v2-1-test updated (aa2bcd2 -> 3d2a5cf)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a change to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


    from aa2bcd2  Converts the specification of branch for pushes to be flexible (#17065)
     new 696352b  Support secret backends/airflow.cfg for celery broker in entrypoint_prod.sh (#17069)
     new 08c8473  Fixes UI assets compilation from PROD image built from sources (#17086)
     new c6333db  Avoid logging in to GitHub Container Registry when not in CI (#17169)
     new 4b40eff  Do not fail-fast kubernetes tests (#17228)
     new 55dccc7  Remove support for Airflow 1.10 cmds in entrypoint_prod.sh (#17248)
     new fedce03  Stop attempting to pull base python image when pulling commit hash (#17231)
     new 6387961  Do not use constraints when preparing venv for k8s tests on CI (#17290)
     new 3c7ea9c  Uses current sources when running k8s tests (#17289)
     new 029a709  Fix breeze kind-cluster deploy failing with ECONREFUSED (#17293)
     new 0582cce  Fix typo in build_images (#17327)
     new 9a09b29  Added print statements for clarity in provider yaml checks (#17322)
     new cc51e1f  Adds compile_assets to INSTALL (#17377)
     new ca67972  Moves SchedulerJob initialization to within daemon context (#17157)
     new 358a1cc  Update best-practices.rst (#17357)
     new f70ef6d  Fix link (#17351)
     new 7ba20f5  docs: fix inconsistencies in configuration docs (#17317)
     new 67f04d2  Grammar and clarity pass on documentation (#17318)
     new 3de7f82  Fix docs link for using SQLite as Metadata DB (#17308)
     new e1a3acf  More optimized lazy-loading of provider information (#17304)
     new bb59cc5  Doc: Strip unnecessary arguments from MariaDB JIRA URL (#17296)
     new d415256  Fix typo in webserver.rst (#17288)
     new 3d2a5cf  Switches to "/" convention in ghcr.io images with optimisations

The 22 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .dockerignore                                      |   4 +-
 .github/workflows/build-images.yml                 |  20 ++-
 .github/workflows/ci.yml                           | 164 +++++++--------------
 BREEZE.rst                                         |   4 +-
 CI.rst                                             |  51 ++++---
 Dockerfile                                         |  38 +++--
 Dockerfile.ci                                      |  54 +++----
 IMAGES.rst                                         |  30 ++--
 INSTALL                                            |   5 +
 README.md                                          |   4 +-
 airflow/__init__.py                                |   6 +-
 airflow/cli/commands/scheduler_command.py          |  16 +-
 airflow/config_templates/config.yml                |  14 +-
 airflow/config_templates/default_airflow.cfg       |  14 +-
 airflow/configuration.py                           |   2 +-
 airflow/providers_manager.py                       |  62 ++++++--
 breeze                                             |  23 +--
 breeze-complete                                    |   2 +-
 dev/retag_docker_images.py                         |   9 +-
 docs/apache-airflow/best-practices.rst             |   4 +-
 docs/apache-airflow/concepts/scheduler.rst         |   9 +-
 docs/apache-airflow/index.rst                      |   2 +-
 .../logging-monitoring/check-health.rst            |   8 +-
 docs/apache-airflow/logging-monitoring/errors.rst  |   4 +-
 docs/apache-airflow/logging-monitoring/metrics.rst |   6 +-
 docs/apache-airflow/security/webserver.rst         |   4 +-
 docs/apache-airflow/start/docker.rst               |   2 +-
 docs/apache-airflow/start/index.rst                |   2 +-
 docs/apache-airflow/timezone.rst                   |  20 +--
 docs/docker-stack/build-arg-ref.rst                |   6 -
 docs/docker-stack/entrypoint.rst                   |   9 +-
 scripts/ci/images/ci_prepare_ci_image_on_ci.sh     |  19 +--
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  29 +---
 .../ci_push_legacy_ci_images.sh}                   |  13 +-
 .../ci_push_legacy_prod_images.sh}                 |  13 +-
 .../images/ci_wait_for_and_verify_all_ci_images.sh |   2 +
 .../ci_wait_for_and_verify_all_prod_images.sh      |   2 +
 .../ci/images/ci_wait_for_and_verify_ci_image.sh   |  27 ++--
 .../ci/images/ci_wait_for_and_verify_prod_image.sh |  32 ++--
 scripts/ci/kubernetes/ci_run_kubernetes_tests.sh   |  16 +-
 scripts/ci/libraries/_build_images.sh              | 123 +++++++++-------
 scripts/ci/libraries/_initialization.sh            |  26 +---
 scripts/ci/libraries/_kind.sh                      |  21 ++-
 scripts/ci/libraries/_parallel.sh                  |   7 +-
 scripts/ci/libraries/_push_pull_remove_images.sh   | 127 ++++++++++------
 scripts/ci/libraries/_script_init.sh               |   2 +-
 .../pre_commit_check_provider_yaml_files.py        |  14 +-
 scripts/ci/selective_ci_checks.sh                  |  10 +-
 scripts/ci/tools/fix_ownership.sh                  |  14 +-
 scripts/docker/compile_www_assets.sh               |   9 +-
 scripts/docker/install_additional_dependencies.sh  |   5 +-
 scripts/docker/install_airflow.sh                  |   4 +-
 ...nstall_airflow_dependencies_from_branch_tip.sh} |  11 +-
 .../docker/install_from_docker_context_files.sh    |   2 +-
 scripts/in_container/prod/entrypoint_prod.sh       |  19 ++-
 55 files changed, 599 insertions(+), 546 deletions(-)
 copy scripts/ci/{pre_commit/pre_commit_ci_build.sh => images/ci_push_legacy_ci_images.sh} (71%)
 copy scripts/ci/{pre_commit/pre_commit_ci_build.sh => images/ci_push_legacy_prod_images.sh} (68%)
 rename scripts/docker/{install_airflow_from_branch_tip.sh => install_airflow_dependencies_from_branch_tip.sh} (85%)

[airflow] 03/22: Avoid logging in to GitHub Container Registry when not in CI (#17169)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c6333db906ad8f08446258b6dc419d91a28998f5
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 22 22:36:16 2021 +0200

    Avoid logging in to GitHub Container Registry when not in CI (#17169)
    
    * Avoid logging in to GitHub Container Registry when not in CI
    
    When GITHUB_TOKEN was set in the environment, an attempt to log in
    to https://ghcr.io/ was made. But GITHUB_TOKEN is commonly used for
    authentication, and if you happened not to have access there, the
    attempt failed.
    
    This PR only attempts to log in when the
    `AIRFLOW_LOGIN_TO_GITHUB_REGISTRY` variable is set to `true`,
    and sets that variable in CI.
    
    (cherry picked from commit ad83feaaf6100124993ae76709fa23cbfffca1af)
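
    As a quick illustration (not part of this commit): a minimal sketch of
    the new opt-in behaviour from a local shell, assuming you actually want
    to log in to ghcr.io. The variable name and the login call mirror the
    _build_images.sh change below; the token value is a placeholder.

        # Opt in explicitly - without this, merely having GITHUB_TOKEN set
        # no longer triggers a login to ghcr.io (the CI workflows set it).
        export AIRFLOW_LOGIN_TO_GITHUB_REGISTRY="true"
        export GITHUB_TOKEN="<personal access token>"   # placeholder
        # Same login performed by build_images::configure_docker_registry
        echo "${GITHUB_TOKEN}" | docker login ghcr.io \
            --username "${GITHUB_USERNAME:-apache}" \
            --password-stdin
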
---
 .github/workflows/build-images.yml    | 1 +
 .github/workflows/ci.yml              | 1 +
 scripts/ci/libraries/_build_images.sh | 9 ++++++---
 3 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 433187c..f29e199 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -47,6 +47,7 @@ env:
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
   INSTALL_PROVIDERS_FROM_SOURCES: "true"
   TARGET_COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
+  AIRFLOW_LOGIN_TO_GITHUB_REGISTRY: "true"
 
 concurrency:
   group: build-${{ github.event.pull_request.number || github.ref }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c95132a..b5f2fd7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,6 +50,7 @@ env:
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ github.event.pull_request.head.sha || github.sha }}"
   GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
   INSTALL_PROVIDERS_FROM_SOURCES: "true"
+  AIRFLOW_LOGIN_TO_GITHUB_REGISTRY: "true"
 
   # You can switch between building the image in "Build Images" workflow or building them in CI workflow
   # Separately for each job.
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index ed6af67..94f5c8e 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -410,12 +410,15 @@ function build_images::get_docker_image_names() {
 # Also enable experimental features of docker (we need `docker manifest` command)
 function build_images::configure_docker_registry() {
     local token="${GITHUB_TOKEN}"
-    if [[ -z "${token}" ]] ; then
+    if [[ -z "${token}" ]]; then
         verbosity::print_info
         verbosity::print_info "Skip logging in to GitHub Registry. No Token available!"
         verbosity::print_info
-    fi
-    if [[ -n "${token}" ]]; then
+    elif [[ ${AIRFLOW_LOGIN_TO_GITHUB_REGISTRY=} != "true" ]]; then
+        verbosity::print_info
+        verbosity::print_info "Skip logging in to GitHub Registry. AIRFLOW_LOGIN_TO_GITHUB_REGISTRY != true"
+        verbosity::print_info
+    elif [[ -n "${token}" ]]; then
         echo "${token}" | docker_v login \
             --username "${GITHUB_USERNAME:-apache}" \
             --password-stdin \

[airflow] 18/22: Fix docs link for using SQLite as Metadata DB (#17308)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3de7f8265005b0211bdb2d8b254f30f973192084
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Thu Jul 29 16:45:43 2021 +0100

    Fix docs link for using SQLite as Metadata DB (#17308)
    
    Identified the issue in: https://apache-airflow.slack.com/archives/CCQ7EGB1P/p1627558105383900
    
    The page should have been https://airflow.apache.org/docs/apache-airflow/2.1.2/howto/set-up-database.html#setting-up-a-sqlite-database and not https://airflow.apache.org/docs/apache-airflow/2.1.2/howto/set-up-database.rst#setting-up-a-sqlite-database
    
    (cherry picked from commit d6e02dc0d1e3e9663eccac9f60d09020144f0dcb)
---
 airflow/configuration.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airflow/configuration.py b/airflow/configuration.py
index 8ea38fe..23cfb23 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -240,7 +240,7 @@ class AirflowConfigParser(ConfigParser):
             if StrictVersion(sqlite3.sqlite_version) < StrictVersion(min_sqlite_version):
                 raise AirflowConfigException(
                     f"error: sqlite C library version too old (< {min_sqlite_version}). "
-                    f"See {get_docs_url('howto/set-up-database.rst#setting-up-a-sqlite-database')}"
+                    f"See {get_docs_url('howto/set-up-database.html#setting-up-a-sqlite-database')}"
                 )
 
         if self.has_option('core', 'mp_start_method'):

[airflow] 04/22: Do not fail-fast kubernetes tests (#17228)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4b40effce6c60bd0bcb073edc08404a3327477bb
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jul 26 12:04:30 2021 +0200

    Do not fail-fast kubernetes tests (#17228)
    
    When any of the k8s tests failed, all others were cancelled.
    
    This is not a good idea when we have transient errors, because the
    failure might be intermittent and we might want to merge the change
    even if one of the K8S tests fails.
    
    (cherry picked from commit 10350aa80b531bb00e68c8f4d9e71a8fb912223e)
---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b5f2fd7..8c31784 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -930,6 +930,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     strategy:
       matrix:
         executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor]
+      fail-fast: false
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: postgres
@@ -985,7 +986,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     name: Helm Chart Executor Upgrade
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, prod-images]
-
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: postgres

[airflow] 02/22: Fixes UI assets compilation from PROD image built from sources (#17086)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 08c8473d589203aa9a895985b274ad686cce7f06
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jul 19 19:52:15 2021 +0200

    Fixes UI assets compilation from PROD image built from sources (#17086)
    
    The #16577 change removed yarn.lock from the installed packages,
    and with it the possibility of preparing assets after the
    package is installed - which so far was how it was done in
    the PROD image built from sources. Asset compilation
    was supposed to keep working after that change, but it was not
    performed in this case.
    
    The change fixes it by:
    
    * properly detecting whether the PROD image is built from sources
      (INSTALLATION_METHOD)
    * compiling the assets from sources, not from the package
    * installing airflow from sources AFTER the assets are compiled
    
    Fixes #16939
    
    (cherry picked from commit 660027f65d5333368aad7f16d3c927b9615e60ac)
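
    As a quick illustration (not part of this commit): a rough sketch of a
    from-sources PROD image build that exercises this path. The build args
    are the ones referenced in the Dockerfile below, but the values shown
    here are assumptions and the exact argument set normally passed
    (Breeze usually handles this) may differ.

        # Build the PROD image from local sources so that
        # AIRFLOW_INSTALLATION_METHOD="." triggers asset compilation
        # (the "." and "/opt/airflow" values are assumed defaults).
        docker build . \
            --build-arg AIRFLOW_INSTALLATION_METHOD="." \
            --build-arg AIRFLOW_SOURCES_FROM="." \
            --build-arg AIRFLOW_SOURCES_TO="/opt/airflow" \
            --tag my-airflow-from-sources:latest
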
---
 Dockerfile                           | 11 ++++++-----
 scripts/docker/compile_www_assets.sh |  7 ++++++-
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 8c32913..66c9649 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -248,15 +248,16 @@ ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} \
 WORKDIR /opt/airflow
 
 # hadolint ignore=SC2086, SC2010
-RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
-        bash /scripts/docker/install_from_docker_context_files.sh; \
-    elif [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
-        bash /scripts/docker/install_airflow.sh; \
-    else \
+RUN if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then \
         # only compile assets if the prod image is build from sources
         # otherwise they are already compiled-in
         bash /scripts/docker/compile_www_assets.sh; \
     fi; \
+    if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
+        bash /scripts/docker/install_from_docker_context_files.sh; \
+    elif [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
+        bash /scripts/docker/install_airflow.sh; \
+    fi; \
     if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
         bash /scripts/docker/install_additional_dependencies.sh; \
     fi; \
diff --git a/scripts/docker/compile_www_assets.sh b/scripts/docker/compile_www_assets.sh
index 01c5470..59a7017 100755
--- a/scripts/docker/compile_www_assets.sh
+++ b/scripts/docker/compile_www_assets.sh
@@ -28,7 +28,12 @@ function compile_www_assets() {
     md5sum_file="static/dist/sum.md5"
     readonly md5sum_file
     local www_dir
-    www_dir="$(python -m site --user-site)/airflow/www"
+    if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." ]]; then
+        # In case we are building from sources in production image, we should build the assets
+        www_dir="${AIRFLOW_SOURCES_TO}/airflow/www"
+    else
+        www_dir="$(python -m site --user-site)/airflow/www"
+    fi
     pushd ${www_dir} || exit 1
     yarn install --frozen-lockfile --no-cache
     yarn run prod

[airflow] 22/22: Switches to "/" convention in ghcr.io images with optimisations

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3d2a5cf7f6ed98cef2c398e46628242bb0abb270
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sat Jul 31 11:48:15 2021 +0200

    Switches to "/" convention in ghcr.io images with optimisations
    
    We are using ghcr.io as an image cache for our CI builds and
    Breeze, and it seems ghcr.io is being "rebuilt" while running.
    
    We had been using the "airflow-<branch>.." image naming
    convention before, because multiple nesting levels of images were
    not supported. However, we recently experienced errors when
    pushing 2.1 images (https://issues.apache.org/jira/browse/INFRA-22124)
    and during the investigation it turned out that it is now possible
    to use "/" in the name of an image. While this still does not
    introduce multiple nesting levels and a folder structure, the
    GitHub UI treats it like that: if you have an image whose name
    starts with "airflow/", the airflow prefix is stripped out, and
    you can add even more "/" in the name to introduce further
    hierarchy.
    
    Since we have to change the image naming convention anyway, due
    to the (still unresolved) bug with no permission to push the
    v2-1-test image, we've decided to switch all our cache images to
    this - now available - "/" convention, to make them better
    structured and easier to manage/understand.
    
    Some more optimisations are implemented - Python, prod-build and
    ci-manifest images are only pushed when the "latest" image is
    prepared. They are not needed for the COMMIT builds, because we
    only need the final images for those builds. This simplified the
    code quite a bit.
    
    The CONTINUE_ON_PIP_CHECK_FAILURE variable has been removed in
    favour of ignoring the pip check error when installing
    dependencies from the branch tip. Such an error might happen for
    a short while when new changes have been merged but the
    constraints have not yet been regenerated and we have conflicting
    dependencies.
    
    The .dockerignore was reviewed and builds were optimized for
    people who have locally built provider packages and documentation,
    by excluding unnecessary files. Some instructions which ran after
    the COPY . command but did not need the sources were moved before
    the COPY command. Those optimisations save 30-40 seconds of
    overhead when building the image (especially when you build
    images incrementally rather than rebuilding from scratch).
    
    PIP and HELM versions have been updated to the latest available.
    
    Backwards compatibility was implemented to allow PRs that have
    not yet been rebased to continue building after this one is
    merged; a workaround has also been implemented to make this
    change work even if it is not yet merged to main.
    
    This "legacy" mode will be removed in about a week, when everybody
    has rebased on top of main.
    
    Documentation has been updated to reflect those changes.
    
    (cherry picked from commit e04c2e3872aa30ed042d3f9bf66d8020cf9c2acb)
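
    As a quick illustration (not part of this commit): what the rename
    means in practice when pulling cache images by hand, using the old and
    new names documented in the CI.rst/IMAGES.rst changes below. The
    branch, Python version and tag are only examples.

        # Old flat naming convention (pre-change):
        docker pull ghcr.io/apache/airflow-main-python3.6-ci-v2:latest
        # New "/" convention - the GitHub UI strips the "airflow/" prefix
        # and shows the rest as a hierarchy:
        docker pull ghcr.io/apache/airflow/main/ci/python3.6:latest
        docker pull ghcr.io/apache/airflow/v2-1-test/prod/python3.6:latest
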
---
 .dockerignore                                      |   4 +-
 .github/workflows/build-images.yml                 |  19 ++-
 .github/workflows/ci.yml                           | 161 +++++++--------------
 BREEZE.rst                                         |   4 +-
 CI.rst                                             |  51 ++++---
 Dockerfile                                         |  27 ++--
 Dockerfile.ci                                      |  54 +++----
 IMAGES.rst                                         |  30 ++--
 README.md                                          |   4 +-
 breeze                                             |  23 +--
 breeze-complete                                    |   2 +-
 dev/retag_docker_images.py                         |   9 +-
 docs/docker-stack/build-arg-ref.rst                |   6 -
 scripts/ci/images/ci_prepare_ci_image_on_ci.sh     |  19 +--
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  29 +---
 .../ci_push_legacy_ci_images.sh}                   |  25 +---
 .../ci_push_legacy_prod_images.sh}                 |  25 +---
 .../images/ci_wait_for_and_verify_all_ci_images.sh |   2 +
 .../ci_wait_for_and_verify_all_prod_images.sh      |   2 +
 .../ci/images/ci_wait_for_and_verify_ci_image.sh   |  27 ++--
 .../ci/images/ci_wait_for_and_verify_prod_image.sh |  32 ++--
 scripts/ci/libraries/_build_images.sh              | 112 ++++++++------
 scripts/ci/libraries/_initialization.sh            |  26 +---
 scripts/ci/libraries/_kind.sh                      |  16 +-
 scripts/ci/libraries/_parallel.sh                  |   7 +-
 scripts/ci/libraries/_push_pull_remove_images.sh   | 117 +++++++++------
 scripts/ci/libraries/_script_init.sh               |   2 +-
 scripts/ci/selective_ci_checks.sh                  |  10 +-
 scripts/ci/tools/fix_ownership.sh                  |  14 +-
 scripts/docker/install_additional_dependencies.sh  |   5 +-
 scripts/docker/install_airflow.sh                  |   4 +-
 ...nstall_airflow_dependencies_from_branch_tip.sh} |  11 +-
 .../docker/install_from_docker_context_files.sh    |   2 +-
 33 files changed, 421 insertions(+), 460 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index d10cfbc..f6113e2 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -40,9 +40,6 @@
 !scripts/in_container
 !scripts/docker
 
-# Add provider packages to the context
-!provider_packages
-
 # Add tests and kubernetes_tests to context.
 !tests
 !kubernetes_tests
@@ -129,3 +126,4 @@ airflow/www/static/docs
 # Exclude docs generated files
 docs/_build/
 docs/_api/
+docs/_doctrees/
diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index f29e199..d2a7b97 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -148,7 +148,6 @@ jobs:
       BACKEND: postgres
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
-      CONTINUE_ON_PIP_CHECK_FAILURE: "true"
       DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
       CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: >
         ${{ github.event_name == 'pull_request_target' && 'false' || 'true' }}
@@ -204,6 +203,10 @@ jobs:
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Push CI images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
         run: ./scripts/ci/images/ci_push_ci_images.sh
+      # Remove me ~ 7 August 2021
+      - name: "Push Legacy CI images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
+        run: ./scripts/ci/images/ci_push_legacy_ci_images.sh
+        if: github.event_name == 'pull_request_target'
 
   build-prod-images:
     permissions:
@@ -230,8 +233,11 @@ jobs:
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
     steps:
       - name: Set envs
+        # Set pull image tag for CI image build, in order to pull the image pushed
+        # Just a moment ago by build-ci-images job
         run: |
           echo "GITHUB_REGISTRY_PUSH_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV"
+          echo "GITHUB_REGISTRY_PULL_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV"
       - uses: actions/checkout@v2
         with:
           ref: ${{ env.TARGET_COMMIT_SHA }}
@@ -279,10 +285,21 @@ jobs:
         # Pull images built in the previous step
         env:
           GITHUB_REGISTRY_WAIT_FOR_IMAGE: "true"
+          # Here we are using PULL_IMAGE_TAG set in the environment variables above
       - name: "Build PROD images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
         run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+        env:
+          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to build PROD image using "latest"
+          GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
       - name: "Push PROD images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
         run: ./scripts/ci/images/ci_push_production_images.sh
+        env:
+          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to build PROD image using "latest"
+          GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
+      # Remove me ~ 7 August 2021
+      - name: "Push Legacy PROD images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
+        run: ./scripts/ci/images/ci_push_legacy_prod_images.sh
+        if: github.event_name == 'pull_request_target'
 
   cancel-on-ci-build:
     permissions:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8c31784..7228f37 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -561,7 +561,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           PACKAGE_FORMAT: "sdist"
 
   tests-helm:
-    timeout-minutes: 20
+    timeout-minutes: 40
     name: "Python unit tests for helm chart"
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
@@ -1045,108 +1045,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           path: /tmp/kind_logs_*
           retention-days: 7
 
-  push-prod-images-to-github-registry:
-    permissions:
-      packages: write
-    timeout-minutes: 10
-    name: "Push PROD images as cache to GitHub Registry"
-    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    needs:
-      - build-info
-      - static-checks
-      - tests-sqlite
-      - tests-postgres
-      - tests-mysql
-      - tests-kubernetes
-      - prod-images
-      - docs
-    if: >
-      (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
-      github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
-      github.event_name != 'schedule'
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-    env:
-      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: "Setup python"
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
-      - name: "Free space"
-        run: ./scripts/ci/tools/free_space.sh
-      - name: Set push-python-image
-        id: push-python-image
-        run: |
-          if [[ "${REF}" == 'refs/head/main' || "${REF}" == 'refs/head/main' ]]; then
-              echo "::set-output name=wanted::true"
-          else
-              echo "::set-output name=wanted::false"
-          fi
-        env:
-          REF: ${{ github.ref }}
-      - name:
-          "Prepare PROD image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
-        env:
-          # Since we are going to push both final image and build image segment, we need to pull the
-          # build image, in case we are pulling from registry rather than building.
-          WAIT_FOR_PROD_BUILD_IMAGE: "true"
-          WAIT_FOR_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}}
-      - name: "Push PROD images ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_push_production_images.sh
-        env:
-          PUSH_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}}
-
-  push-ci-images-to-github-registry:
-    permissions:
-      packages: write
-    timeout-minutes: 10
-    name: "Push CI images as cache to GitHub Registry"
-    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    needs:
-      - build-info
-      - static-checks
-      - tests-sqlite
-      - tests-postgres
-      - tests-mysql
-      - tests-kubernetes
-      - ci-images
-      - docs
-    if: >
-      (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
-      github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
-      github.event_name != 'schedule'
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-    env:
-      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: "Setup python"
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
-      - name: "Free space"
-        run: ./scripts/ci/tools/free_space.sh
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Push CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_push_ci_images.sh
-
   constraints:
     permissions:
       contents: write
@@ -1166,10 +1064,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: ${{needs.build-info.outputs.pythonVersionsListAsString}}
-    # Only run it for direct pushes
-    if: >
-      github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
-      github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test'
+    # Only run it for direct pushes and scheduled builds
+    if: github.event_name == 'push' || github.event_name == 'schedule'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1203,17 +1099,68 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - name: "Set constraints branch name"
         id: constraints-branch
         run: ./scripts/ci/constraints/ci_branch_constraints.sh
+      # only actually push it when we are in apache/airflow repository
       - name: Checkout ${{ steps.constraints-branch.outputs.branch }}
         uses: actions/checkout@v2
+        if: github.repository == 'apache/airflow'
         with:
           path: "repo"
           ref: ${{ steps.constraints-branch.outputs.branch }}
           persist-credentials: false
       - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}"
         run: ./scripts/ci/constraints/ci_commit_constraints.sh
+        if: github.repository == 'apache/airflow'
       - name: "Push changes"
         uses: ./.github/actions/github-push-action
+        if: github.repository == 'apache/airflow'
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           branch: ${{ steps.constraints-branch.outputs.branch }}
           directory: "repo"
+
+  # Push images to GitHub Registry in Apache repository, if all tests are successful and build
+  # is executed as result of direct push to "main" or one of the "test" branches
+  # It actually rebuilds all images using just-pushed constraints if they changed
+  # It will also check if a new python image was released and will pull the latest one if needed
+  # Same as build-images.yaml
+  push-images-to-github-registry:
+    permissions:
+      packages: write
+    timeout-minutes: 10
+    name: "Push images as cache to GitHub Registry"
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    needs:
+      - build-info
+      - constraints
+      - docs
+    # Only run it for direct pushes and scheduled builds
+    if: github.event_name == 'push' || github.event_name == 'schedule'
+    strategy:
+      matrix:
+        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
+    env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
+      GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
+      GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
+      PUSH_PYTHON_BASE_IMAGE: "true"
+      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: "true"
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+      - name: "Setup python"
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
+      - name: "Free space"
+        run: ./scripts/ci/tools/free_space.sh
+      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:latest"
+        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+      - name: "Prepare PROD image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:latest"
+        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+      - name: "Push CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest"
+        run: ./scripts/ci/images/ci_push_ci_images.sh
+      - name: "Push PROD images ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest"
+        run: ./scripts/ci/images/ci_push_production_images.sh
diff --git a/BREEZE.rst b/BREEZE.rst
index 90d3f0b..79f27b4 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2382,9 +2382,9 @@ This is the current syntax for  `./breeze <./breeze>`_:
           Helm version - only used in case one of kind-cluster commands is used.
           One of:
 
-                 v3.2.4
+                 v3.6.3
 
-          Default: v3.2.4
+          Default: v3.6.3
 
   --executor EXECUTOR
           Executor to use in a kubernetes cluster.
diff --git a/CI.rst b/CI.rst
index fedd300..6a704d0 100644
--- a/CI.rst
+++ b/CI.rst
@@ -568,12 +568,11 @@ This workflow is a regular workflow that performs all checks of Airflow code.
 +---------------------------+----------------------------------------------+-------+-------+------+
 | Tests Kubernetes          | Run Kubernetes test                          | Yes(2)| Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Push PROD images          | Pushes PROD images to GitHub Registry (4)    | -     | Yes   | -    |
-+---------------------------+----------------------------------------------+-------+-------+------+
-| Push CI images            | Pushes CI images to GitHub Registry (4)      | -     | Yes   | -    |
-+---------------------------+----------------------------------------------+-------+-------+------+
 | Constraints               | Upgrade constraints to latest ones (4)       | -     | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
+| Push images               | Pushes latest images to GitHub Registry (4)  | -     | Yes   | Yes  |
++---------------------------+----------------------------------------------+-------+-------+------+
+
 
 Comments:
 
@@ -584,8 +583,8 @@ Comments:
      You can set it to "false" to disable using shared images - this is slower though as the images
      are rebuilt in every job that needs them.
  (4) PROD and CI images are pushed as "latest" to GitHub Container registry and constraints are upgraded
-     only if all tests are successful. Note that images are not pushed in CRON jobs because they are rebuilt
-     from scratch and we want to push incremental changes to the Github Container registry.
+     only if all tests are successful. The images are rebuilt in this step using constraints pushed
+     in the previous step.
 
 CodeQL scan
 -----------
@@ -620,7 +619,9 @@ with the COMMIT_SHA id for images that were used in particular build.
 The image names follow the patterns (except the Python image, all the images are stored in
 https://ghcr.io/ in ``apache`` organization.
 
-The packages are available under:
+The packages are available under (CONTAINER_NAME is url-encoded name of the image). Note that "/" are
+supported now in the ``ghcr.io`` as apart of the image name within ``apache`` organization, but they
+have to be percent-encoded when you access them via UI (/ = %2F)
 
 ``https://github.com/apache/airflow/pkgs/container/<CONTAINER_NAME>``
 
@@ -631,26 +632,30 @@ The packages are available under:
 | (DockerHub)  |                                                          | Python maintainer release new versions of those image    |
 |              |                                                          | with security fixes every few weeks in DockerHub.        |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
-| Airflow      | airflow-python-v2:<X.Y>-slim-buster                      | Version of python base image used in Airflow Builds      |
-| python base  | or                                                       | We keep the "latest" version there and also each build   |
-| image        | airflow-python-v2:<X.Y>-slim-buster-<COMMIT_SHA>         | has an associated specific python version that was used. |
+| Airflow      | airflow/<BRANCH>/python:<X.Y>-slim-buster                | Version of python base image used in Airflow Builds      |
+| python base  |                                                          | We keep the "latest" version only to mark last "good"    |
+| image        |                                                          | python base that went through testing and was pushed.    |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
-| CI image     | airflow-<BRANCH>-python<X.Y>-ci-v2:latest                | CI image - this is the image used for most of the tests. |
-|              | or                                                       | Contains all provider dependencies and tools useful      |
-|              | airflow-<BRANCH>-python<X.Y>-ci-v2:<COMMIT_SHA>          | For testing. This image is used in Breeze.               |
+| PROD Build   | airflow/<BRANCH>/prod-build/python<X.Y>:latest           | Production Build image - this is the "build" stage of    |
+| image        |                                                          | production image. It contains build-essentials and all   |
+|              |                                                          | necessary apt packages to build/install PIP packages.    |
+|              |                                                          | We keep the "latest" version only to speed up builds.    |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
-| Manifest     | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:latest       | CI manifest image - this is the image used to optimize   |
-| CI image     | or                                                       | pulls and builds for Breeze development environment      |
-|              | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:<COMMIT_SHA> | They store hash indicating whether the image will be     |
+| Manifest     | airflow/<BRANCH>/ci-manifest/python<X.Y>:latest          | CI manifest image - this is the image used to optimize   |
+| CI image     |                                                          | pulls and builds for Breeze development environment      |
+|              |                                                          | They store hash indicating whether the image will be     |
 |              |                                                          | faster to build or pull.                                 |
+|              |                                                          | We keep the "latest" version only to help breeze to      |
+|              |                                                          | check if new image should be pulled.                     |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
-| PROD Build   | airflow-<BRANCH>-python<X.Y>-build-v2:latest             | Production Build image - this is the "build" segment of  |
-| image        | or                                                       | production image. It contains build-essentials and all   |
-|              | airflow-<BRANCH>-python<X.Y>-build-v2:<COMMIT_SHA>       | necessary packages to install PIP packages.              |
+| CI image     | airflow/<BRANCH>/ci/python<X.Y>:latest                   | CI image - this is the image used for most of the tests. |
+|              | or                                                       | Contains all provider dependencies and tools useful      |
+|              | airflow/<BRANCH>/ci/python<X.Y>:<COMMIT_SHA>             | For testing. This image is used in Breeze.               |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
-| PROD image   | airflow-<BRANCH>-python<X.Y>-v2:latest                   | Production image. This is the actual production image    |
+|              |                                                          | faster to build or pull.                                 |
+| PROD image   | airflow/<BRANCH>/prod/python<X.Y>:latest                 | Production image. This is the actual production image    |
 |              | or                                                       | optimized for size.                                      |
-|              | airflow-<BRANCH>-python<X.Y>-v2:<COMMIT_SHA>             | It contains only compiled libraries and minimal set of   |
+|              | airflow/<BRANCH>/prod/python<X.Y>:<COMMIT_SHA>           | It contains only compiled libraries and minimal set of   |
 |              |                                                          | dependencies to run Airflow.                             |
 +--------------+----------------------------------------------------------+----------------------------------------------------------+
 
@@ -668,9 +673,9 @@ For example knowing that the CI build was for commit ``cd27124534b46c9688a1d89e7
 
 .. code-block:: bash
 
-  docker pull ghcr.io/apache/airflow-main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
+  docker pull ghcr.io/apache/airflow/main/ci/python3.6:cd27124534b46c9688a1d89e75fcd137ab5137e3
 
-  docker run -it ghcr.io/apache/airflow-main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
+  docker run -it ghcr.io/apache/airflow/main/ci/python3.6:cd27124534b46c9688a1d89e75fcd137ab5137e3
 
 
 But you usually need to pass more variables and complex setup if you want to connect to a database or
diff --git a/Dockerfile b/Dockerfile
index 66c9649..210918c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,7 +44,8 @@ ARG AIRFLOW_GID="50000"
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
 
-ARG AIRFLOW_PIP_VERSION=21.1.2
+ARG AIRFLOW_PIP_VERSION=21.2.2
+ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
 
 # By default PIP has progress bar but you can disable it.
 ARG PIP_PROGRESS_BAR="on"
@@ -108,12 +109,13 @@ ARG DEV_APT_COMMAND="\
     && curl https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - > /dev/null \
     && echo 'deb https://dl.yarnpkg.com/debian/ stable main' > /etc/apt/sources.list.d/yarn.list"
 ARG ADDITIONAL_DEV_APT_COMMAND="echo"
+ARG ADDITIONAL_DEV_APT_ENV=""
 
 ENV DEV_APT_DEPS=${DEV_APT_DEPS} \
     ADDITIONAL_DEV_APT_DEPS=${ADDITIONAL_DEV_APT_DEPS} \
     DEV_APT_COMMAND=${DEV_APT_COMMAND} \
     ADDITIONAL_DEV_APT_COMMAND=${ADDITIONAL_DEV_APT_COMMAND} \
-    ADDITIONAL_DEV_APT_ENV=""
+    ADDITIONAL_DEV_APT_ENV=${ADDITIONAL_DEV_APT_ENV}
 
 # Note missing man directories on debian-buster
 # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=863199
@@ -216,7 +218,7 @@ ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \
 RUN bash /scripts/docker/install_pip_version.sh; \
     if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
           ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
-        bash /scripts/docker/install_airflow_from_branch_tip.sh; \
+        bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \
     fi
 
 COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
@@ -236,14 +238,11 @@ ARG INSTALL_FROM_PYPI="true"
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
 ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
-ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
-
 
 ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} \
     INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES} \
     INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI} \
-    EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \
-    CONTINUE_ON_PIP_CHECK_FAILURE=${CONTINUE_ON_PIP_CHECK_FAILURE}
+    EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS}
 
 WORKDIR /opt/airflow
 
@@ -276,7 +275,7 @@ RUN if [[ -f /docker-context-files/requirements.txt ]]; then \
 
 ARG BUILD_ID
 ARG COMMIT_SHA
-ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
+ARG AIRFLOW_IMAGE_REPOSITORY
 ARG AIRFLOW_IMAGE_DATE_CREATED
 
 ENV BUILD_ID=${BUILD_ID} COMMIT_SHA=${COMMIT_SHA}
@@ -293,15 +292,14 @@ LABEL org.apache.airflow.distro="debian" \
   org.opencontainers.image.created=${AIRFLOW_IMAGE_DATE_CREATED} \
   org.opencontainers.image.authors="dev@airflow.apache.org" \
   org.opencontainers.image.url="https://airflow.apache.org" \
-  org.opencontainers.image.documentation="https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html" \
-  org.opencontainers.image.source="https://github.com/apache/airflow" \
+  org.opencontainers.image.documentation="https://airflow.apache.org/docs/docker-stack/index.html" \
   org.opencontainers.image.version="${AIRFLOW_VERSION}" \
   org.opencontainers.image.revision="${COMMIT_SHA}" \
   org.opencontainers.image.vendor="Apache Software Foundation" \
   org.opencontainers.image.licenses="Apache-2.0" \
   org.opencontainers.image.ref.name="airflow-build-image" \
   org.opencontainers.image.title="Build Image Segment for Production Airflow Image" \
-  org.opencontainers.image.description="Installed Apache Airflow with build-time dependencies"
+  org.opencontainers.image.description="Reference build-time dependencies image for production-ready Apache Airflow image"
 
 ##############################################################################################
 # This is the actual Airflow image - much smaller than the build one. We copy
@@ -379,7 +377,7 @@ ARG AIRFLOW_HOME
 ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
 ARG BUILD_ID
 ARG COMMIT_SHA
-ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
+ARG AIRFLOW_IMAGE_REPOSITORY
 ARG AIRFLOW_IMAGE_DATE_CREATED
 # By default PIP will install everything in ~/.local
 ARG PIP_USER="true"
@@ -468,15 +466,14 @@ LABEL org.apache.airflow.distro="debian" \
   org.opencontainers.image.created=${AIRFLOW_IMAGE_DATE_CREATED} \
   org.opencontainers.image.authors="dev@airflow.apache.org" \
   org.opencontainers.image.url="https://airflow.apache.org" \
-  org.opencontainers.image.documentation="https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html" \
-  org.opencontainers.image.source="https://github.com/apache/airflow" \
+  org.opencontainers.image.documentation="https://airflow.apache.org/docs/docker-stack/index.html" \
   org.opencontainers.image.version="${AIRFLOW_VERSION}" \
   org.opencontainers.image.revision="${COMMIT_SHA}" \
   org.opencontainers.image.vendor="Apache Software Foundation" \
   org.opencontainers.image.licenses="Apache-2.0" \
   org.opencontainers.image.ref.name="airflow" \
   org.opencontainers.image.title="Production Airflow Image" \
-  org.opencontainers.image.description="Installed Apache Airflow"
+  org.opencontainers.image.description="Reference, production-ready Apache Airflow image"
 
 
 ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 7b1dbf1..cca868e 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -21,7 +21,8 @@ FROM ${PYTHON_BASE_IMAGE} as main
 SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
-ARG AIRFLOW_VERSION="2.1.0.dev0"
+ARG AIRFLOW_VERSION="2.1.3.dev0"
+ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
 # By increasing this number we can do force build of all dependencies
 ARG DEPENDENCIES_EPOCH_NUMBER="6"
 
@@ -98,6 +99,7 @@ RUN mkdir -pv /usr/share/man/man1 \
 COPY scripts/docker/*.sh /scripts/docker/
 RUN bash /scripts/docker/install_mysql.sh dev \
     && adduser airflow \
+    && echo "airflow:airflow" | chpasswd \
     && echo "airflow ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/airflow \
     && chmod 0440 /etc/sudoers.d/airflow
 
@@ -125,6 +127,15 @@ ARG RUNTIME_APT_DEPS="\
       unzip \
       vim \
       xxd"
+
+# Install Helm
+ARG HELM_VERSION="v3.6.3"
+
+RUN SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') \
+    && HELM_URL="https://get.helm.sh/helm-${HELM_VERSION}-${SYSTEM}-amd64.tar.gz" \
+    && curl --location "${HELM_URL}" | tar -xvz -O "${SYSTEM}"-amd64/helm > /usr/local/bin/helm \
+    && chmod +x /usr/local/bin/helm
+
 ARG ADDITIONAL_RUNTIME_APT_DEPS=""
 ARG RUNTIME_APT_COMMAND=""
 ARG ADDITIONAL_RUNTIME_APT_COMMAND=""
@@ -207,7 +218,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 # By default in the image, we are installing all providers when installing from sources
 ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
 ARG INSTALL_FROM_PYPI="true"
-ARG AIRFLOW_PIP_VERSION=21.1.2
+ARG AIRFLOW_PIP_VERSION=21.2.2
 # Setup PIP
 # By default PIP install run without cache to make image smaller
 ARG PIP_NO_CACHE_DIR="true"
@@ -270,7 +281,7 @@ ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENT
 RUN bash /scripts/docker/install_pip_version.sh; \
     if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
           ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
-        bash /scripts/docker/install_airflow_from_branch_tip.sh; \
+        bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \
     fi
 
 # Generate random hex dump file so that we can determine whether it's faster to rebuild the image
@@ -299,8 +310,6 @@ COPY setup.cfg ${AIRFLOW_SOURCES}/setup.cfg
 
 COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/__init__.py
 
-ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
-
 # The goal of this line is to install the dependencies from the most current setup.py from sources
 # This will be usually incremental small set of packages in CI optimized build, so it will be very fast
 # In non-CI optimized build this will install all dependencies before installing sources.
@@ -325,11 +334,13 @@ RUN chmod a+x /entrypoint
 
 COPY scripts/docker/load.bash /opt/bats/lib/
 
-# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not
-# copying over stuff that is accidentally generated or that we do not need (such as egg-info)
-# if you want to add something that is missing and you expect to see it in the image you can
-# add it with ! in .dockerignore next to the airflow, test etc. directories there
-COPY . ${AIRFLOW_SOURCES}/
+# Additional python deps to install
+ARG ADDITIONAL_PYTHON_DEPS=""
+
+RUN bash /scripts/docker/install_pip_version.sh; \
+    if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
+            bash /scripts/docker/install_additional_dependencies.sh; \
+    fi
 
 # Install autocomplete for airflow
 RUN if command -v airflow; then \
@@ -339,27 +350,16 @@ RUN if command -v airflow; then \
 # Install autocomplete for Kubectl
 RUN echo "source /etc/bash_completion" >> ~/.bashrc
 
-WORKDIR ${AIRFLOW_SOURCES}
-
-# Install Helm
-ARG HELM_VERSION="v3.2.4"
-
-RUN SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') \
-    && HELM_URL="https://get.helm.sh/helm-${HELM_VERSION}-${SYSTEM}-amd64.tar.gz" \
-    && curl --location "${HELM_URL}" | tar -xvz -O "${SYSTEM}"-amd64/helm > /usr/local/bin/helm \
-    && chmod +x /usr/local/bin/helm
-
-# Additional python deps to install
-ARG ADDITIONAL_PYTHON_DEPS=""
+# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not
+# copying over stuff that is accidentally generated or that we do not need (such as egg-info)
+# if you want to add something that is missing and you expect to see it in the image you can
+# add it with ! in .dockerignore next to the airflow, test etc. directories there
+COPY . ${AIRFLOW_SOURCES}/
 
-RUN bash /scripts/docker/install_pip_version.sh; \
-    if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
-            bash /scripts/docker/install_additional_dependencies.sh; \
-    fi
+WORKDIR ${AIRFLOW_SOURCES}
 
 ARG BUILD_ID
 ARG COMMIT_SHA
-ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
 ARG AIRFLOW_IMAGE_DATE_CREATED
 
 ENV PATH="/files/bin/:/opt/airflow/scripts/in_container/bin/:${HOME}:${PATH}" \
diff --git a/IMAGES.rst b/IMAGES.rst
index 82e6989..a1b1ace 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -24,7 +24,7 @@ Airflow has two main images (build from Dockerfiles):
 
   * Production image (Dockerfile) - that can be used to build your own production-ready Airflow installation
     You can read more about building and using the production image in the
-    `Production Deployments <https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html>`_ document.
+    `Docker stack <https://airflow.apache.org/docs/docker-stack/index.html>`_ documentation.
     The image is built using `Dockerfile <Dockerfile>`_
 
   * CI image (Dockerfile.ci) - used for running tests and local development. The image is built using
@@ -246,19 +246,21 @@ Images with a commit SHA (built for pull requests and pushes)
 
 .. code-block:: bash
 
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-ci-v2:<COMMIT_SHA>    - for CI images
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-v2:<COMMIT_SHA>       - for production images
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:<COMMIT_SHA> - for production build stage
-  ghcr.io/apache/airflow-python-v2:X.Y-slim-buster-<COMMIT_SHA>   - for base Python images
+  ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:<COMMIT_SHA>         - for CI images
+  ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:<COMMIT_SHA>       - for production images
+
+We do not push Base Python images and prod-build images when we prepare COMMIT builds, because those
+images are never rebuilt locally, so there is no need to store base images specific for those builds.
 
 Latest images (pushed when main merge succeeds):
 
 .. code-block:: bash
 
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-ci-v2:latest    - for CI images
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-v2:latest       - for production images
-  ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:latest - for production build stage
-  ghcr.io/apache/airflow-python-v2:X.Y-slim-buster          - for base Python images
+  ghcr.io/apache/airflow/<BRANCH>/python:<X.Y>-slim-buster        - for base Python images
+  ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:latest           - for CI images
+  ghcr.io/apache/airflow/<BRANCH>/ci-manifest/python<X.Y>:latest  - for CI Manifest images
+  ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:latest         - for production images
+  ghcr.io/apache/airflow/<BRANCH>/prod-build/python<X.Y>:latest   - for production build stage
 
 You can see all the current GitHub images at `<https://github.com/apache/airflow/packages>`_
 
@@ -445,12 +447,6 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 |                                          |                                          | upgraded to newer versions matching      |
 |                                          |                                          | setup.py before installation.            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``CONTINUE_ON_PIP_CHECK_FAILURE``        | ``false``                                | By default the image will fail if pip    |
-|                                          |                                          | check fails for it. This is good for     |
-|                                          |                                          | interactive building but on CI the       |
-|                                          |                                          | image should be built regardless - we    |
-|                                          |                                          | have a separate step to verify image.    |
-+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``true``                                 | Allows to pre-cache airflow PIP packages |
 |                                          |                                          | from the GitHub of Apache Airflow        |
 |                                          |                                          | This allows to optimize iterations for   |
@@ -558,8 +554,8 @@ way of querying image details via API. You really need to download the image to
 We workaround it in the way that always when we build the image we build a very small image manifest
 containing randomly generated UUID and push it to registry together with the main CI image.
 The tag for the manifest image reflects the image it refers to with added ``-manifest`` suffix.
-The manifest image for ``ghcr.io/apache/airflow-main-python3.6-ci-v2`` is named
-``ghcr.io/apache/airflow-main-python3.6-ci-v2-manifest``.
+The manifest image for ``ghcr.io/apache/airflow/main/ci/python3.6`` is named
+``ghcr.io/apache/airflow/main/ci-manifest/python3.6``.
 
 The image is quickly pulled (it is really, really small) when important files change and the content
 of the randomly generated UUID is compared with the one in our image. If the contents are different
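
A quick, hedged illustration of the manifest mechanism under the new naming convention: the snippet
below pulls the tiny manifest image for main/Python 3.6 and copies out the file holding the random UUID
so it can be compared locally. This is a sketch only - the path of the hash file inside the manifest
image (assumed here to be /build-cache-hash) is an illustration, not taken from this change.

    # Sketch only: fetch the tiny CI manifest image and read the build-cache UUID it carries.
    # The /build-cache-hash path inside the image is an assumption for illustration.
    docker pull ghcr.io/apache/airflow/main/ci-manifest/python3.6:latest
    docker create --name airflow-ci-manifest \
        ghcr.io/apache/airflow/main/ci-manifest/python3.6:latest
    docker cp airflow-ci-manifest:/build-cache-hash ./remote-build-cache-hash
    docker rm --force airflow-ci-manifest
    cat ./remote-build-cache-hash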
diff --git a/README.md b/README.md
index 9798136..d58a39f 100644
--- a/README.md
+++ b/README.md
@@ -209,8 +209,8 @@ Those are - in the order of most common ways people install Airflow:
 - [Docker Images](https://hub.docker.com/r/apache/airflow) to install airflow via
   `docker` tool, use them in Kubernetes, Helm Charts, `docker-compose`, `docker swarm` etc. You can
   read more about using, customising, and extending the images in the
-  [Latest docs](https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html), and
-  learn details on the internals in the [IMAGES.rst](IMAGES.rst) document.
+  [Latest docs](https://airflow.apache.org/docs/docker-stack/index.html), and
+  learn details on the internals in the [IMAGES.rst](https://github.com/apache/airflow/blob/main/IMAGES.rst) document.
 - [Tags in GitHub](https://github.com/apache/airflow/tags) to retrieve the git project sources that
   were used to generate official source packages via git
 
diff --git a/breeze b/breeze
index 7decdf6..2fe1458 100755
--- a/breeze
+++ b/breeze
@@ -164,6 +164,9 @@ function breeze::setup_default_breeze_constants() {
     # Can be overridden by '--force-build-images' flag.
     export FORCE_BUILD_IMAGES="false"
 
+    # When we push from breeze we always want to push base python images
+    export PUSH_PYTHON_BASE_IMAGE="true"
+
     # Determines whether to reinstall airflow at entering the image.
     export USE_AIRFLOW_VERSION=""
     # if set to true, the ci image will look for wheel packages in dist folder and will install them
@@ -569,8 +572,7 @@ EOF
 #   AIRFLOW_SOURCES
 #   AIRFLOW_CI_IMAGE
 #   AIRFLOW_PROD_IMAGE
-#   AIRFLOW_PROD_IMAGE_KUBERNETES
-#   AIRFLOW_PROD_BASE_TAG
+#   AIRFLOW_IMAGE_KUBERNETES
 #   SQLITE_URL
 #
 # Arguments:
@@ -633,8 +635,7 @@ export MYSQL_VERSION="${MYSQL_VERSION}"
 export AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
 export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
 export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
-export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE_KUBERNETES}"
-export AIRFLOW_PROD_BASE_TAG="${AIRFLOW_PROD_BASE_TAG}"
+export AIRFLOW_IMAGE_KUBERNETES="${AIRFLOW_IMAGE_KUBERNETES}"
 export SQLITE_URL="${SQLITE_URL}"
 export USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION}"
 export USE_PACKAGES_FROM_DIST="${USE_PACKAGES_FROM_DIST}"
@@ -650,7 +651,6 @@ EOF
 #
 # Global constants set:
 #
-#     PYTHON_BASE_IMAGE_VERSION
 #     PYTHON_BASE_IMAGE
 #     AIRFLOW_CI_IMAGE
 #     BUILT_CI_IMAGE_FLAG_FILE
@@ -934,7 +934,6 @@ function breeze::parse_arguments() {
             echo
             export DOCKER_CACHE="disabled"
             # if not set here, docker cached is determined later, depending on type of image to be build
-            readonly DOCKER_CACHE
             export FORCE_BUILD_IMAGES="true"
             shift
             ;;
@@ -950,7 +949,6 @@ function breeze::parse_arguments() {
             echo
             export DOCKER_CACHE="local"
             # if not set here, docker cached is determined later, depending on type of image to be build
-            readonly DOCKER_CACHE
             shift
             ;;
         -U | --build-cache-pulled)
@@ -958,14 +956,12 @@ function breeze::parse_arguments() {
             echo
             export DOCKER_CACHE="pulled"
             # if not set here, docker cached is determined later, depending on type of image to be build
-            readonly DOCKER_CACHE
             shift
             ;;
         -X | --build-cache-disabled)
             echo "Use disabled cache to build images"
             echo
             export DOCKER_CACHE="disabled"
-            readonly DOCKER_CACHE
             # if not set here, docker cached is determined later, depending on type of image to be build
             shift
             ;;
@@ -1126,7 +1122,6 @@ function breeze::parse_arguments() {
             export CHECK_IMAGE_FOR_REBUILD="false"
             export SKIP_BUILDING_PROD_IMAGE="true"
             export SKIP_CHECK_REMOTE_IMAGE="true"
-            export FAIL_ON_GITHUB_DOCKER_PULL_ERROR="true"
             shift 2
             ;;
         --init-script)
@@ -1177,12 +1172,6 @@ function breeze::parse_arguments() {
             echo
             shift
             ;;
-        --continue-on-pip-check-failure)
-            export CONTINUE_ON_PIP_CHECK_FAILURE="true"
-            echo "Skip PIP check failure."
-            echo
-            shift
-            ;;
         --package-format)
             export PACKAGE_FORMAT="${2}"
             echo "Selected package type: ${PACKAGE_FORMAT}"
@@ -3571,7 +3560,7 @@ breeze::check_and_save_all_params
 
 build_images::determine_docker_cache_strategy
 
-build_images::get_docker_image_names
+build_images::get_docker_cache_image_names
 
 initialization::make_constants_read_only
 
diff --git a/breeze-complete b/breeze-complete
index db9e42a..045bb65 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -29,7 +29,7 @@ _breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq r
 _breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers"
 _breeze_allowed_kubernetes_modes="image"
 _breeze_allowed_kubernetes_versions="v1.20.2 v1.19.7 v1.18.15"
-_breeze_allowed_helm_versions="v3.2.4"
+_breeze_allowed_helm_versions="v3.6.3"
 _breeze_allowed_kind_versions="v0.11.1"
 _breeze_allowed_mysql_versions="5.7 8"
 _breeze_allowed_postgres_versions="9.6 10 11 12 13"
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index 5eeda8e..f29ce1b 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -36,10 +36,11 @@ PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
 GHCR_IO_PREFIX = "ghcr.io/apache/airflow"
 
 GHCR_IO_IMAGES = [
-    "{prefix}-{branch}-python{python_version}-ci-v2-manifest:latest",
-    "{prefix}-{branch}-python{python_version}-ci-v2:latest",
-    "{prefix}-{branch}-python{python_version}-v2:latest",
-    "{prefix}-{branch}-python{python_version}-build-v2:latest",
+    "{prefix}/{branch}/ci-manifest/python{python_version}:latest",
+    "{prefix}/{branch}/ci/python{python_version}:latest",
+    "{prefix}/{branch}/prod-build/python{python_version}-build-v2:latest",
+    "{prefix}/{branch}/prod/python{python_version}-build-v2:latest",
+    "{prefix}/{branch}/python:{python_version}-slim-buster",
 ]
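
For reference, the retagging driven by the list above boils down to a pull/tag/push loop per image.
A minimal bash sketch, assuming a hypothetical retag from main to v2-1-test for Python 3.6 (the real
work is done by the Python script, and the base Python image, which uses the python:<X.Y>-slim-buster
tag form, is handled analogously):

    # Sketch only: retag one Python version's images from the old branch to the new one.
    SOURCE_BRANCH="main"
    TARGET_BRANCH="v2-1-test"
    PYTHON_VERSION="3.6"
    for image in \
        "ci-manifest/python${PYTHON_VERSION}:latest" \
        "ci/python${PYTHON_VERSION}:latest" \
        "prod-build/python${PYTHON_VERSION}:latest" \
        "prod/python${PYTHON_VERSION}:latest"
    do
        docker pull "ghcr.io/apache/airflow/${SOURCE_BRANCH}/${image}"
        docker tag "ghcr.io/apache/airflow/${SOURCE_BRANCH}/${image}" \
            "ghcr.io/apache/airflow/${TARGET_BRANCH}/${image}"
        docker push "ghcr.io/apache/airflow/${TARGET_BRANCH}/${image}"
    done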
 
 
diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst
index 8780970..f2507e0 100644
--- a/docs/docker-stack/build-arg-ref.rst
+++ b/docs/docker-stack/build-arg-ref.rst
@@ -79,12 +79,6 @@ for examples of using those arguments.
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | Build argument                           | Default value                            | Description                              |
 +==========================================+==========================================+==========================================+
-| ``CONTINUE_ON_PIP_CHECK_FAILURE``        | ``false``                                | By default the image build fails if pip  |
-|                                          |                                          | check fails for it. This is good for     |
-|                                          |                                          | interactive building but on CI the       |
-|                                          |                                          | image should be built regardless - we    |
-|                                          |                                          | have a separate step to verify image.    |
-+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``UPGRADE_TO_NEWER_DEPENDENCIES``        | ``false``                                | If set to true, the dependencies are     |
 |                                          |                                          | upgraded to newer versions matching      |
 |                                          |                                          | setup.py before installation.            |
diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
index a550038..8a89a30 100755
--- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
@@ -29,24 +29,13 @@ function build_ci_image_on_ci() {
 
     if [[ ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
         # Pretend that the image was build. We already have image with the right sources baked in!
+        # so all the checksums are assumed to be correct
         md5sum::calculate_md5sum_for_all_files
 
-        # Tries to wait for the images indefinitely
-        # skips further image checks - since we already have the target image
-
-        local python_tag_suffix=""
-        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then
-            python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        fi
-        # first we pull base python image. We will need it to re-push it after main build
-        # Becoming the new "latest" image for other builds
-        build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
-            "${python_tag_suffix}"
-
-        # And then the actual image
-        build_images::wait_for_image_tag "${AIRFLOW_CI_IMAGE}" \
-            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        # Remove me on 7th August 2021 after all users have had a chance to rebase
+        legacy_ci_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci-v2:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
+        build_images::wait_for_image_tag "${AIRFLOW_CI_IMAGE}" ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${legacy_ci_image}"
         md5sum::update_all_md5_with_group
     else
         build_images::rebuild_ci_image_if_needed
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index dbcb07d..14bd71f 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -33,36 +33,13 @@ function build_prod_images_on_ci() {
 
     if [[ ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
         # Tries to wait for the images indefinitely
-        # skips further image checks - since we already have the target image
-
-        local python_tag_suffix=""
-        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then
-            python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        fi
-
-        if [[ "${WAIT_FOR_PYTHON_BASE_IMAGE=}" == "true" ]]; then
-            # first we pull base python image. We will need it to re-push it after main build
-            # Becoming the new "latest" image for other builds
-            build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
-                "${python_tag_suffix}"
-        fi
-
-        # And then the actual image
-        build_images::wait_for_image_tag "${AIRFLOW_PROD_IMAGE}" \
-            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-
-        # And the prod build image
-        if [[ "${WAIT_FOR_PROD_BUILD_IMAGE=}" == "true" ]]; then
-            # If specified in variable - also waits for the build image
-            build_images::wait_for_image_tag "${AIRFLOW_PROD_BUILD_IMAGE}" \
-                ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        fi
-
+        # Remove me on 7th August 2021 after all users have had a chance to rebase
+        legacy_prod_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-v2:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        build_images::wait_for_image_tag "${AIRFLOW_PROD_IMAGE}" ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${legacy_prod_image}"
     else
         build_images::build_prod_images_from_locally_built_airflow_packages
     fi
 
-
     # Disable force pulling forced above this is needed for the subsequent scripts so that
     # They do not try to pull/build images again
     unset FORCE_PULL_IMAGES
diff --git a/scripts/ci/tools/fix_ownership.sh b/scripts/ci/images/ci_push_legacy_ci_images.sh
similarity index 58%
copy from scripts/ci/tools/fix_ownership.sh
copy to scripts/ci/images/ci_push_legacy_ci_images.sh
index 6ed1161..aa6696b 100755
--- a/scripts/ci/tools/fix_ownership.sh
+++ b/scripts/ci/images/ci_push_legacy_ci_images.sh
@@ -15,26 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-#
-# Fixes ownership for files created inside container (files owned by root will be owned by host user)
-#
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-if [[ ${OSTYPE} == "darwin"* ]]; then
-    # No need to fix ownership on MacOS - the filesystem there takes care about ownership mapping
-    exit
-fi
-
-declare -a EXTRA_DOCKER_FLAGS
-
-sanity_checks::sanitize_mounted_files
-
-read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)"
+# This script pushes images under the legacy (old naming convention) names to keep old PRs working
+# It should be removed ~7th of August 2021, giving users time to rebase their old pull requests
+build_images::prepare_ci_build
 
-docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
-    --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
-    "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
+legacy_ci_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci-v2:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+docker tag "${AIRFLOW_CI_IMAGE}" "${legacy_ci_image}"
+docker push "${legacy_ci_image}"
diff --git a/scripts/ci/tools/fix_ownership.sh b/scripts/ci/images/ci_push_legacy_prod_images.sh
similarity index 58%
copy from scripts/ci/tools/fix_ownership.sh
copy to scripts/ci/images/ci_push_legacy_prod_images.sh
index 6ed1161..3f74874 100755
--- a/scripts/ci/tools/fix_ownership.sh
+++ b/scripts/ci/images/ci_push_legacy_prod_images.sh
@@ -15,26 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-#
-# Fixes ownership for files created inside container (files owned by root will be owned by host user)
-#
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-if [[ ${OSTYPE} == "darwin"* ]]; then
-    # No need to fix ownership on MacOS - the filesystem there takes care about ownership mapping
-    exit
-fi
-
-declare -a EXTRA_DOCKER_FLAGS
-
-sanity_checks::sanitize_mounted_files
-
-read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)"
+# This script pushes images under the legacy (old naming convention) names, which keeps old PRs working before
+# they are rebased onto main. It should be removed ~7th of August 2021, giving users time to rebase their old pull requests
+build_images::prepare_ci_build
 
-docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
-    --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
-    "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
+legacy_prod_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-v2:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+docker tag "${AIRFLOW_PROD_IMAGE}" "${legacy_prod_image}"
+docker push "${legacy_prod_image}"
diff --git a/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh b/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
index 4255374..39dfe8f 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
@@ -25,6 +25,8 @@ source "${LIBRARIES_DIR}/_all_libs.sh"
 
 initialization::set_output_color_variables
 
+PARALLEL_TAIL_LENGTH=5
+
 parallel::make_sure_gnu_parallel_is_installed
 
 parallel::make_sure_python_versions_are_specified
diff --git a/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh b/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
index 08ed54b..6786a31 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
@@ -25,6 +25,8 @@ source "${LIBRARIES_DIR}/_all_libs.sh"
 
 initialization::set_output_color_variables
 
+PARALLEL_TAIL_LENGTH=5
+
 parallel::make_sure_gnu_parallel_is_installed
 
 parallel::make_sure_python_versions_are_specified
diff --git a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
index 105bcfb..bb18100 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
@@ -28,25 +28,34 @@ shift
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+image_name_with_tag="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+# Remove me on 7th August 2021 after all users have had a chance to rebase
+legacy_ci_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci-v2:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
 function pull_ci_image() {
-    local image_name_with_tag="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-    start_end::group_start "Pulling ${image_name_with_tag} image"
-    push_pull_remove_images::pull_image_if_not_present_or_forced "${image_name_with_tag}"
+    start_end::group_start "Pulling image: ${IMAGE_AVAILABLE}"
+    push_pull_remove_images::pull_image_if_not_present_or_forced "${IMAGE_AVAILABLE}"
+    # Remove me on 7th August 2021 after all users have had a chance to rebase
+    if [[ ${IMAGE_AVAILABLE} != "${image_name_with_tag}" ]]; then
+        verbosity::print_info "Tagging the legacy ${IMAGE_AVAILABLE} with ${image_name_with_tag}"
+        docker tag "${IMAGE_AVAILABLE}" "${image_name_with_tag}"
+    fi
     start_end::group_end
-
 }
 
 start_end::group_start "Configure Docker Registry"
 build_images::configure_docker_registry
 start_end::group_end
 
-start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE}"
-
-push_pull_remove_images::wait_for_image "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+start_end::group_start "Waiting for ${image_name_with_tag}"
+# Remove me on 7th August 2021 after all users have had a chance to rebase
+push_pull_remove_images::wait_for_image "${image_name_with_tag}" "${legacy_ci_image}"
 build_images::prepare_ci_build
-pull_ci_image
 start_end::group_end
 
+pull_ci_image
+
 if [[ ${VERIFY_IMAGE=} != "false" ]]; then
-    verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+    verify_image::verify_ci_image "${image_name_with_tag}"
 fi
diff --git a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
index 482b0a5..d93da72 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
@@ -28,22 +28,34 @@ shift
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-start_end::group_start "Configure Docker Registry"
-build_images::configure_docker_registry
-start_end::group_end
+image_name_with_tag="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+# Remove me on 7th August 2021 after all users have had a chance to rebase
+legacy_prod_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-v2:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
-start_end::group_start "Waiting for ${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+function pull_prod_image() {
+    start_end::group_start  "Pulling image: ${IMAGE_AVAILABLE}"
+    push_pull_remove_images::pull_image_if_not_present_or_forced "${IMAGE_AVAILABLE}"
+    # Remove me on 7th August 2021 after all users have had a chance to rebase
+    if [[ ${IMAGE_AVAILABLE} != "${image_name_with_tag}" ]]; then
+        verbosity::print_info "Tagging the legacy ${IMAGE_AVAILABLE} with ${image_name_with_tag}"
+        docker tag "${IMAGE_AVAILABLE}" "${image_name_with_tag}"
+    fi
+    start_end::group_end
+}
 
-push_pull_remove_images::wait_for_image "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+start_end::group_start "Configure Docker Registry"
+build_images::configure_docker_registry
 start_end::group_end
 
-start_end::group_start "Pulling the PROD Image"
+start_end::group_start "Waiting for ${image_name_with_tag}"
+# Remove me on 7th August 2021 after all users have had a chance to rebase
+push_pull_remove_images::wait_for_image "${image_name_with_tag}" "${legacy_prod_image}"
 build_images::prepare_prod_build
-image_name_with_tag="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-verbosity::print_info "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}"
-push_pull_remove_images::pull_image_if_not_present_or_forced "${image_name_with_tag}"
 start_end::group_end
 
+pull_prod_image
+
 if [[ ${VERIFY_IMAGE=} != "false" ]]; then
-    verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+    verify_image::verify_prod_image "${image_name_with_tag}"
 fi
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index ca94e4c..dfbfe1a 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -251,7 +251,6 @@ EOF
 # Retrieves information about build cache hash random file from the local image
 #
 function build_images::get_local_build_cache_hash() {
-
     set +e
     # Remove the container just in case
     docker_v rm --force "local-airflow-ci-container" 2>/dev/null >/dev/null
@@ -262,6 +261,7 @@ function build_images::get_local_build_cache_hash() {
         LOCAL_MANIFEST_IMAGE_UNAVAILABLE="true"
         export LOCAL_MANIFEST_IMAGE_UNAVAILABLE
         touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}"
+        set -e
         return
 
     fi
@@ -296,6 +296,7 @@ function build_images::get_remote_image_build_cache_hash() {
         REMOTE_DOCKER_REGISTRY_UNREACHABLE="true"
         export REMOTE_DOCKER_REGISTRY_UNREACHABLE
         touch "${REMOTE_IMAGE_BUILD_CACHE_HASH_FILE}"
+        set -e
         return
     fi
     set -e
@@ -358,49 +359,44 @@ function build_images::get_github_container_registry_image_prefix() {
     echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]'
 }
 
-function build_images::get_docker_image_names() {
-    # python image version to use
-    export PYTHON_BASE_IMAGE_VERSION=${PYTHON_BASE_IMAGE_VERSION:=${PYTHON_MAJOR_MINOR_VERSION}}
-
+function build_images::get_docker_cache_image_names() {
     # Python base image to use
-    export PYTHON_BASE_IMAGE="python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
+    export PYTHON_BASE_IMAGE="python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster"
 
     local image_name
     image_name="${GITHUB_REGISTRY}/$(build_images::get_github_container_registry_image_prefix)"
 
-    # CI image base tag
-    export AIRFLOW_CI_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
+    # Example:
+    #  ghcr.io/apache/airflow/main/python:3.8-slim-buster
+    export AIRFLOW_PYTHON_BASE_IMAGE="${image_name}/${BRANCH_NAME}/python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster"
 
     # Example:
-    #  ghcr.io/apache/airflow-main-python3.8-ci-v2
-    export AIRFLOW_CI_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
+    #  ghcr.io/apache/airflow/main/ci/python3.8
+    export AIRFLOW_CI_IMAGE="${image_name}/${BRANCH_NAME}/ci/python${PYTHON_MAJOR_MINOR_VERSION}"
 
-    export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest:${AIRFLOW_CI_BASE_TAG}"
+    # Example:
+    #  local-airflow-ci-manifest/main/python3.8
+    export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest/${BRANCH_NAME}/python${PYTHON_MAJOR_MINOR_VERSION}"
 
     # Example:
-    #  ghcr.io/apache/airflow-main-python3.8-ci-v2-manifest
-    export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}-manifest"
+    #  ghcr.io/apache/airflow/main/ci-manifest/python3.8
+    export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}/${BRANCH_NAME}/ci-manifest/python${PYTHON_MAJOR_MINOR_VERSION}"
 
     # File that is touched when the CI image is built for the first time locally
     export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
 
-    # PROD image to build
-    export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
-
     # Example:
-    #  ghcr.io/apache/airflow-v2-1-test-python-v2:3.6-slim-buster
-    export AIRFLOW_PROD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
-
-    # PROD Kubernetes image to build
-    export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE}-kubernetes"
+    #  ghcr.io/apache/airflow/main/prod/python3.8
+    export AIRFLOW_PROD_IMAGE="${image_name}/${BRANCH_NAME}/prod/python${PYTHON_MAJOR_MINOR_VERSION}"
 
     # Example:
-    #   ghcr.io/apache/airflow-main-python3.6-build-v2
-    export AIRFLOW_PROD_BUILD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}-build${GITHUB_REGISTRY_IMAGE_SUFFIX}"
+    #   ghcr.io/apache/airflow/main/prod-build/python3.8
+    export AIRFLOW_PROD_BUILD_IMAGE="${image_name}/${BRANCH_NAME}/prod-build/python${PYTHON_MAJOR_MINOR_VERSION}"
+
+    # Kubernetes image to build
+    #  ghcr.io/apache/airflow/main/kubernetes/python3.8
+    export AIRFLOW_IMAGE_KUBERNETES="${image_name}/${BRANCH_NAME}/kubernetes/python${PYTHON_MAJOR_MINOR_VERSION}"
 
-    # Example:
-    #  ghcr.io/apache/airflow-python-v2:3.6-slim-buster
-    export AIRFLOW_PYTHON_BASE_IMAGE="${image_name}-python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
 
 
 }
@@ -669,7 +665,6 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \
         --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \
-        --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \
         --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \
         --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="${DEFAULT_CONSTRAINTS_BRANCH}" \
         --build-arg AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS}" \
@@ -810,7 +805,6 @@ function build_images::build_prod_images() {
         --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
         --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \
-        --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \
         --build-arg BUILD_ID="${CI_BUILD_ID}" \
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
         --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \
@@ -845,7 +839,6 @@ function build_images::build_prod_images() {
         --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
         --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \
-        --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \
         --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
         --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
@@ -868,17 +861,37 @@ function build_images::build_prod_images() {
     fi
 }
 
+# Tags source image with names provided
+# $1 source image
+# $2, $3, ... - target image names
+function build_images::tag_image() {
+    local source_image_name="$1"
+    shift
+    local target_image_name
+    for target_image_name in "${@}"; do
+        echo
+        echo "Tagging ${source_image_name} as ${target_image_name}."
+        echo
+        docker_v tag "${source_image_name}" "${target_image_name}"
+    done
+}
+
 # Waits for image tag to appear in GitHub Registry, pulls it and tags with the target tag
 # Parameters:
 #  $1 - image name to wait for
-#  $2 - suffix of the image to wait for
-#  $3, $4, ... - target tags to tag the image with
+#  $2 - suffix of the image to wait for
+#  $3 - fallback (legacy) image to wait for (Remove me on 7th August 2021 after all users have had a chance to rebase)
+#  $4, $5, ... - target tags to tag the image with
 function build_images::wait_for_image_tag() {
 
     local image_name="${1}"
     local image_suffix="${2}"
     shift 2
 
+    # Remove me 7th of August 2021
+    local legacy_image_to_pull="${1}"
+    shift
+
     local image_to_wait_for="${image_name}${image_suffix}"
     start_end::group_start "Wait for image tag ${image_to_wait_for}"
     while true; do
@@ -891,26 +904,34 @@ function build_images::wait_for_image_tag() {
         image_hash="$(docker images -q "${image_to_wait_for}" 2>>"${OUTPUT_LOG}" || true)"
         if [[ -z "${image_hash}" ]]; then
             echo
-            echo "The image ${image_to_wait_for} is not yet available. No local hash for the image. Waiting."
+            echo "The image ${image_to_wait_for} is not yet available. No local hash for the image. Falling bacl to legacy."
             echo
             echo "Last log:"
             cat "${OUTPUT_LOG}" || true
             echo
-            sleep 10
-        else
-            echo
-            echo "The image ${image_to_wait_for} with '${image_name}' tag"
-            echo
-            echo
-            echo "Tagging ${image_to_wait_for} as ${image_name}."
-            echo
-            docker_v tag "${image_to_wait_for}" "${image_name}"
-            for TARGET_TAG in "${@}"; do
+            echo "Checking Legacy image!"
+            # Legacy - Remove me 7th of August 2021
+            set +e
+            echo "${COLOR_BLUE}Docker pull ${legacy_image_to_pull} ${COLOR_RESET}" >"${OUTPUT_LOG}"
+            docker_v pull "${legacy_image_to_pull}" >>"${OUTPUT_LOG}" 2>&1
+            set -e
+            echo "${COLOR_BLUE} Docker images -q ${legacy_image_to_pull}${COLOR_RESET}" >>"${OUTPUT_LOG}"
+            image_hash="$(docker images -q "${legacy_image_to_pull}" 2>>"${OUTPUT_LOG}" || true)"
+            if [[ -z "${image_hash}" ]]; then
                 echo
-                echo "Tagging ${image_to_wait_for} as ${TARGET_TAG}."
+                echo "The image ${legacy_image_to_pull} is not yet available. No local hash for the image. Waiting."
                 echo
-                docker_v tag "${image_to_wait_for}" "${TARGET_TAG}"
-            done
+                echo "Last log:"
+                cat "${OUTPUT_LOG}" || true
+                sleep 10
+            else
+                # Legacy - Remove me 7th of August 2021
+                # Pretend that the image we waited for was downloaded :)
+                build_images::tag_image "${legacy_image_to_pull}" "${image_to_wait_for}" "${image_name}:latest" "${@}"
+                break
+            fi
+        else
+            build_images::tag_image "${image_to_wait_for}" "${image_name}:latest" "${@}"
             break
         fi
     done
@@ -927,7 +948,6 @@ function build_images::determine_docker_cache_strategy() {
             export DOCKER_CACHE="pulled"
         fi
     fi
-    readonly DOCKER_CACHE
     verbosity::print_info
     verbosity::print_info "Using ${DOCKER_CACHE} cache strategy for the build."
     verbosity::print_info
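
Putting the renamed build_images::get_docker_cache_image_names together, for BRANCH_NAME=main and
PYTHON_MAJOR_MINOR_VERSION=3.8 the exports above should resolve to roughly the following values
(a sketch derived from the hunk, not output captured from an actual run):

    AIRFLOW_PYTHON_BASE_IMAGE="ghcr.io/apache/airflow/main/python:3.8-slim-buster"
    AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.8"
    AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="ghcr.io/apache/airflow/main/ci-manifest/python3.8"
    AIRFLOW_PROD_IMAGE="ghcr.io/apache/airflow/main/prod/python3.8"
    AIRFLOW_PROD_BUILD_IMAGE="ghcr.io/apache/airflow/main/prod-build/python3.8"
    AIRFLOW_IMAGE_KUBERNETES="ghcr.io/apache/airflow/main/kubernetes/python3.8"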
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index cbeb932..a51cdd0 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -153,6 +153,10 @@ function initialization::initialize_base_variables() {
 
     # Dry run - only show docker-compose and docker commands but do not execute them
     export DRY_RUN_DOCKER=${DRY_RUN_DOCKER:="false"}
+
+    # By default we only push built ci/prod images - base python images are only pushed
+    # When requested
+    export PUSH_PYTHON_BASE_IMAGE=${PUSH_PYTHON_BASE_IMAGE:="false"}
 }
 
 # Determine current branch
@@ -200,7 +204,7 @@ function initialization::initialize_files_for_rebuild_check() {
         "scripts/docker/common.sh"
         "scripts/docker/install_additional_dependencies.sh"
         "scripts/docker/install_airflow.sh"
-        "scripts/docker/install_airflow_from_branch_tip.sh"
+        "scripts/docker/install_airflow_dependencies_from_branch_tip.sh"
         "scripts/docker/install_from_docker_context_files.sh"
         "scripts/docker/install_mysql.sh"
         "airflow/www/package.json"
@@ -282,9 +286,6 @@ function initialization::initialize_force_variables() {
 
     # Can be set to true to skip if the image is newer in registry
     export SKIP_CHECK_REMOTE_IMAGE=${SKIP_CHECK_REMOTE_IMAGE:="false"}
-
-    # Should be set to true if you expect image frm GitHub to be present and downloaded
-    export FAIL_ON_GITHUB_DOCKER_PULL_ERROR=${FAIL_ON_GITHUB_DOCKER_PULL_ERROR:="false"}
 }
 
 # Determine information about the host
@@ -401,7 +402,7 @@ function initialization::initialize_image_build_variables() {
     export INSTALLED_PROVIDERS
     export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
 
-    AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="21.1"}
+    AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="21.2.2"}
     export AIRFLOW_PIP_VERSION
 
     # We also pin version of wheel used to get consistent builds
@@ -425,9 +426,6 @@ function initialization::initialize_image_build_variables() {
     # Installs different airflow version than current from the sources
     export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""}
 
-    # Continue on PIP CHECK failure
-    export CONTINUE_ON_PIP_CHECK_FAILURE=${CONTINUE_ON_PIP_CHECK_FAILURE:="false"}
-
     # Determines if airflow should be installed from a specified reference in GitHub
     export INSTALL_AIRFLOW_REFERENCE=${INSTALL_AIRFLOW_REFERENCE:=""}
 
@@ -482,7 +480,7 @@ function initialization::initialize_kubernetes_variables() {
     CURRENT_KIND_VERSIONS+=("v0.11.1")
     export CURRENT_KIND_VERSIONS
     # Currently supported versions of Helm
-    CURRENT_HELM_VERSIONS+=("v3.2.4")
+    CURRENT_HELM_VERSIONS+=("v3.6.3")
     export CURRENT_HELM_VERSIONS
     # Current executor in chart
     CURRENT_EXECUTOR+=("KubernetesExecutor")
@@ -535,7 +533,6 @@ function initialization::initialize_git_variables() {
 function initialization::initialize_github_variables() {
     # Defaults for interacting with GitHub
     export GITHUB_REGISTRY="ghcr.io"
-    export GITHUB_REGISTRY_IMAGE_SUFFIX=${GITHUB_REGISTRY_IMAGE_SUFFIX:="-v2"}
     export GITHUB_REGISTRY_WAIT_FOR_IMAGE=${GITHUB_REGISTRY_WAIT_FOR_IMAGE:="false"}
     export GITHUB_REGISTRY_PULL_IMAGE_TAG=${GITHUB_REGISTRY_PULL_IMAGE_TAG:="latest"}
     export GITHUB_REGISTRY_PUSH_IMAGE_TAG=${GITHUB_REGISTRY_PUSH_IMAGE_TAG:="latest"}
@@ -634,7 +631,6 @@ Force variables:
     FORCE_BUILD_IMAGES: ${FORCE_BUILD_IMAGES}
     FORCE_ANSWER_TO_QUESTIONS: ${FORCE_ANSWER_TO_QUESTIONS}
     SKIP_CHECK_REMOTE_IMAGE: ${SKIP_CHECK_REMOTE_IMAGE}
-    FAIL_ON_GITHUB_DOCKER_PULL_ERROR: ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR}
 
 Host variables:
 
@@ -663,7 +659,6 @@ Common image build variables:
     INSTALL_FROM_PYPI: '${INSTALL_FROM_PYPI}'
     AIRFLOW_PRE_CACHED_PIP_PACKAGES: '${AIRFLOW_PRE_CACHED_PIP_PACKAGES}'
     UPGRADE_TO_NEWER_DEPENDENCIES: '${UPGRADE_TO_NEWER_DEPENDENCIES}'
-    CONTINUE_ON_PIP_CHECK_FAILURE: '${CONTINUE_ON_PIP_CHECK_FAILURE}'
     CHECK_IMAGE_FOR_REBUILD: '${CHECK_IMAGE_FOR_REBUILD}'
     AIRFLOW_CONSTRAINTS_LOCATION: '${AIRFLOW_CONSTRAINTS_LOCATION}'
     AIRFLOW_CONSTRAINTS_REFERENCE: '${AIRFLOW_CONSTRAINTS_REFERENCE}'
@@ -830,8 +825,6 @@ function initialization::make_constants_read_only() {
     readonly ADDITIONAL_RUNTIME_APT_DEPS
     readonly ADDITIONAL_RUNTIME_APT_ENV
 
-    readonly DOCKER_CACHE
-
     readonly GITHUB_REGISTRY
     readonly GITHUB_REGISTRY_WAIT_FOR_IMAGE
     readonly GITHUB_REGISTRY_PULL_IMAGE_TAG
@@ -847,11 +840,8 @@ function initialization::make_constants_read_only() {
 
     readonly VERSION_SUFFIX_FOR_PYPI
 
-    readonly PYTHON_BASE_IMAGE_VERSION
     readonly PYTHON_BASE_IMAGE
-    readonly AIRFLOW_CI_BASE_TAG
-    readonly AIRFLOW_PROD_BASE_TAG
-    readonly AIRFLOW_PROD_IMAGE_KUBERNETES
+    readonly AIRFLOW_IMAGE_KUBERNETES
     readonly BUILT_CI_IMAGE_FLAG_FILE
     readonly INIT_SCRIPT_FILE
 
diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index d4910d9..1fb77eb 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -262,8 +262,8 @@ function kind::build_image_for_kubernetes_tests() {
     if [[ -n ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} ]]; then
         image_tag="${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
     fi
-    echo "Building ${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest from ${AIRFLOW_PROD_IMAGE}:${image_tag}"
-    docker_v build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest" . -f - <<EOF
+    echo "Building ${AIRFLOW_IMAGE_KUBERNETES}:latest from ${AIRFLOW_PROD_IMAGE}:${image_tag}"
+    docker_v build --tag "${AIRFLOW_IMAGE_KUBERNETES}:latest" . -f - <<EOF
 FROM ${AIRFLOW_PROD_IMAGE}:${image_tag}
 
 COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/
@@ -271,11 +271,11 @@ COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/
 COPY airflow/kubernetes_executor_templates/ \${AIRFLOW_HOME}/pod_templates/
 
 EOF
-    echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES}:${image_tag} is prepared for test kubernetes deployment."
+    echo "The ${AIRFLOW_IMAGE_KUBERNETES}:${image_tag} is prepared for test kubernetes deployment."
 }
 
 function kind::load_image_to_kind_cluster() {
-    kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest"
+    kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_IMAGE_KUBERNETES}:latest"
 }
 
 MAX_NUM_TRIES_FOR_HEALTH_CHECK=12
@@ -343,8 +343,8 @@ function kind::deploy_airflow_with_helm() {
     helm install airflow . \
         --timeout 10m0s \
         --namespace "${HELM_AIRFLOW_NAMESPACE}" \
-        --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
-        --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "defaultAirflowRepository=${AIRFLOW_IMAGE_KUBERNETES}" \
+        --set "images.airflow.repository=${AIRFLOW_IMAGE_KUBERNETES}" \
         --set "images.airflow.tag=latest" -v 1 \
         --set "defaultAirflowTag=latest" -v 1 \
         --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \
@@ -376,8 +376,8 @@ function kind::upgrade_airflow_with_helm() {
     helm repo add stable https://charts.helm.sh/stable/
     helm dep update
     helm upgrade airflow . --namespace "${HELM_AIRFLOW_NAMESPACE}" \
-        --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
-        --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "defaultAirflowRepository=${AIRFLOW_IMAGE_KUBERNETES}" \
+        --set "images.airflow.repository=${AIRFLOW_IMAGE_KUBERNETES}" \
         --set "images.airflow.tag=latest" -v 1 \
         --set "defaultAirflowTag=latest" -v 1 \
         --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \
diff --git a/scripts/ci/libraries/_parallel.sh b/scripts/ci/libraries/_parallel.sh
index f81fee0..69d362a 100644
--- a/scripts/ci/libraries/_parallel.sh
+++ b/scripts/ci/libraries/_parallel.sh
@@ -22,6 +22,9 @@
 function parallel::initialize_monitoring() {
     PARALLEL_MONITORED_DIR="$(mktemp -d)"
     export PARALLEL_MONITORED_DIR
+
+    PARALLEL_TAIL_LENGTH=${PARALLEL_TAIL_LENGTH:=2}
+    export PARALLEL_TAIL_LENGTH
 }
 
 function parallel::make_sure_gnu_parallel_is_installed() {
@@ -81,9 +84,9 @@ function parallel::monitor_loop() {
               continue
             fi
 
-            echo "${COLOR_BLUE}### The last lines for ${parallel_process} process: ${directory}/stdout ###${COLOR_RESET}"
+            echo "${COLOR_BLUE}### The last ${PARALLEL_TAIL_LENGTH} lines for ${parallel_process} process: ${directory}/stdout ###${COLOR_RESET}"
             echo
-            tail -2 "${directory}/stdout" || true
+            tail "-${PARALLEL_TAIL_LENGTH}" "${directory}/stdout" || true
             echo
 
             if [[ -s "${directory}/status" ]]; then
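
The new PARALLEL_TAIL_LENGTH knob defaults to 2 and is what the two wait-and-verify scripts above bump
to 5. Any caller can do the same before monitoring starts; a minimal sketch using only the functions
shown in this file:

    # Sketch: show the last 10 lines of each parallel job's stdout instead of the default 2.
    PARALLEL_TAIL_LENGTH=10
    parallel::initialize_monitoring   # keeps the pre-set value because of the ':=' default above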
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index 0e99b10..847d010 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -48,6 +48,7 @@ function push_pull_remove_images::push_image_with_retries() {
 # Should be run with set +e
 # Parameters:
 #   $1 -> image to pull
+#   $2 - fallback image
 function push_pull_remove_images::pull_image_if_not_present_or_forced() {
     local image_to_pull="${1}"
     local image_hash
@@ -62,25 +63,6 @@ function push_pull_remove_images::pull_image_if_not_present_or_forced() {
         echo "Pulling the image ${image_to_pull}"
         echo
         docker_v pull "${image_to_pull}"
-        local exit_value="$?"
-        if [[ ${exit_value} != "0" && ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR} == "true" ]]; then
-            echo
-            echo """
-${COLOR_RED}ERROR: Exiting on docker pull error
-
-If you have authorisation problems, you might want to run:
-
-docker login ${image_to_pull%%\/*}
-
-You need to use generate token as the password, not your personal password.
-You can generate one at https://github.com/settings/tokens
-Make sure to choose 'read:packages' scope.
-${COLOR_RESET}
-"""
-            exit ${exit_value}
-        fi
-        echo
-        return ${exit_value}
     fi
 }
 
@@ -90,7 +72,7 @@ function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed(
    local dockerhub_python_version
    dockerhub_python_version=$(docker run "${PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)')
    local local_python_version
-   local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)')
+   local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)' || true)
    if [[ ${local_python_version} != "${dockerhub_python_version}" ]]; then
        echo
        echo "There is a new Python Base image updated!"
@@ -102,6 +84,10 @@ function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed(
             docker_v build \
                 --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
                 -t "${AIRFLOW_PYTHON_BASE_IMAGE}" -
+  else
+      echo
+      echo "Not rebuilding the base python image - the image has the same python version ${dockerhub_python_version}"
+      echo
   fi
 }
 
@@ -116,10 +102,10 @@ function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed(
 #     it will pull the right image using the specified suffix
 function push_pull_remove_images::pull_base_python_image() {
     echo
-    echo "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}"
+    echo "Docker pull base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}"
     echo
     if [[ -n ${DETECTED_TERMINAL=} ]]; then
-        echo -n "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}
+        echo -n "Docker pull base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}
 " > "${DETECTED_TERMINAL}"
     fi
     if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then
@@ -132,8 +118,14 @@ function push_pull_remove_images::pull_base_python_image() {
             return 1
         fi
     else
+        set +e
         push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PYTHON_BASE_IMAGE}"
-        if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]] ; then
+        local res="$?"
+        set -e
+        if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" || ${res} != "0" ]] ; then
+            # Rebuild the base python image using DockerHub - either when we explicitly want it
+            # or when there is no image available yet in ghcr.io (usually when you build it for the
+            # first time in your repository)
             push_pull_remove_images::check_and_rebuild_python_base_image_if_needed
         fi
     fi
@@ -151,8 +143,26 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
         fi
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
+        set +e
         push_pull_remove_images::pull_image_if_not_present_or_forced \
             "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        local res="$?"
+        set -e
+        if [[ ${res} != "0" ]]; then
+            if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then
+                echo
+                echo "The CI image cache does not exist. This is likely the first time you build the image"
+                echo "Switching to 'local' cache for docker images"
+                echo
+                DOCKER_CACHE="local"
+            else
+                echo
+                echo "The CI image cache does not exist and we want to pull tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+                echo "Failing as we have to pull the tagged image in order to continue"
+                echo
+                return "${res}"
+            fi
+        fi
     fi
 }
 
@@ -169,12 +179,33 @@ function push_pull_remove_images::pull_prod_images_if_needed() {
         fi
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
+        set +e
         # "Build" segment of production image
         push_pull_remove_images::pull_image_if_not_present_or_forced \
             "${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        # "Main" segment of production image
-        push_pull_remove_images::pull_image_if_not_present_or_forced \
-            "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        local res="$?"
+        if [[ ${res} == "0" ]]; then
+            # "Main" segment of production image
+            push_pull_remove_images::pull_image_if_not_present_or_forced \
+                "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+            res="$?"
+        fi
+        set -e
+        if [[ ${res} != "0" ]]; then
+            if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then
+                echo
+                echo "The PROD image cache does not exist. This is likely the first time you build the image"
+                echo "Switching to 'local' cache for docker images"
+                echo
+                DOCKER_CACHE="local"
+            else
+                echo
+                echo "The PROD image cache does not exist and we want to pull tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+                echo "Failing as we have to pull the tagged image in order to continue"
+                echo
+                return "${res}"
+            fi
+        fi
     fi
 }
 
@@ -203,17 +234,12 @@ function push_pull_remove_images::push_ci_images_to_github() {
     local airflow_ci_tagged_image="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_tagged_image}"
     push_pull_remove_images::push_image_with_retries "${airflow_ci_tagged_image}"
+    # Also push the CI manifest image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest"
     if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then
-        local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+        local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:latest"
         docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${airflow_ci_manifest_tagged_image}"
         push_pull_remove_images::push_image_with_retries "${airflow_ci_manifest_tagged_image}"
     fi
-    if [[ -n ${GITHUB_SHA=} ]]; then
-        # Also push image to GitHub registry with commit SHA
-        local airflow_ci_sha_image="${AIRFLOW_CI_IMAGE}:${COMMIT_SHA}"
-        docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_sha_image}"
-        push_pull_remove_images::push_image_with_retries "${airflow_ci_sha_image}"
-    fi
 }
 
 # Pushes PROD image to registry in GitHub
@@ -222,19 +248,18 @@ function push_pull_remove_images::push_ci_images_to_github() {
 #     "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds
 #     "latest"        - in case of push builds
 function push_pull_remove_images::push_prod_images_to_github () {
+    if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then
+        push_pull_remove_images::push_python_image_to_github
+    fi
     local airflow_prod_tagged_image="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_tagged_image}"
     push_pull_remove_images::push_image_with_retries "${airflow_prod_tagged_image}"
-    if [[ -n ${COMMIT_SHA=} ]]; then
-        # Also push image to GitHub registry with commit SHA
-        local airflow_prod_sha_image="${AIRFLOW_PROD_IMAGE}:${COMMIT_SHA}"
-        docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_sha_image}"
-        push_pull_remove_images::push_image_with_retries "${airflow_prod_sha_image}"
+    # Also push prod build image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest"
+    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then
+        local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:latest"
+        docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}"
+        push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}"
     fi
-    # Also push prod build image
-    local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
-    docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}"
-    push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}"
 }
 
 # waits for an image to be available in GitHub Container Registry. Should be run with `set +e`
@@ -253,12 +278,18 @@ function push_pull_remove_images::check_image_manifest() {
 }
 
 # waits for an image to be available in the GitHub registry
+# Remove the fallback on 7th of August 2021
 function push_pull_remove_images::wait_for_image() {
     set +e
-    echo " Waiting for github registry image: " "$1"
+    echo " Waiting for github registry image: $1 with $2 fallback"
     while true
     do
         if push_pull_remove_images::check_image_manifest "$1"; then
+            export IMAGE_AVAILABLE="$1"
+            break
+        fi
+        if push_pull_remove_images::check_image_manifest "$2"; then
+            export IMAGE_AVAILABLE="$2"
             break
         fi
         sleep 30
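
With this change push_pull_remove_images::wait_for_image exports IMAGE_AVAILABLE, set to whichever of
the two names (new-style or legacy) showed up first; callers then pull that image and retag it to the
canonical name. A condensed sketch of the flow, using the variables from the verify scripts earlier in
this change:

    # Sketch: wait for either the new-style or the legacy CI image, pull it, and normalise the tag.
    push_pull_remove_images::wait_for_image "${image_name_with_tag}" "${legacy_ci_image}"
    push_pull_remove_images::pull_image_if_not_present_or_forced "${IMAGE_AVAILABLE}"
    if [[ ${IMAGE_AVAILABLE} != "${image_name_with_tag}" ]]; then
        docker tag "${IMAGE_AVAILABLE}" "${image_name_with_tag}"
    fi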
diff --git a/scripts/ci/libraries/_script_init.sh b/scripts/ci/libraries/_script_init.sh
index 0f3c862..dc79fd5 100755
--- a/scripts/ci/libraries/_script_init.sh
+++ b/scripts/ci/libraries/_script_init.sh
@@ -41,7 +41,7 @@ build_images::determine_docker_cache_strategy
 
 initialization::get_environment_for_builds_on_ci
 
-build_images::get_docker_image_names
+build_images::get_docker_cache_image_names
 
 initialization::make_constants_read_only
 
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index e7a9144..ee3b506 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -46,10 +46,12 @@ function check_upgrade_to_newer_dependencies_needed() {
     # shellcheck disable=SC2153
     if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" != "false" ||
             ${GITHUB_EVENT_NAME=} == 'push' || ${GITHUB_EVENT_NAME=} == "scheduled" ]]; then
-        # Trigger upgrading to latest constraints where label is set or when
-        # SHA of the merge commit triggers rebuilding layer in the docker image
+        # Trigger upgrading to latest constraints when we are in a push or scheduled event, or when it is forced
+        # by UPGRADE_TO_NEWER_DEPENDENCIES set to non-false. The variable is set to the
+        # SHA of the merge commit - so that it always triggers rebuilding the layer in the docker image
         # Each build that upgrades to latest constraints will get truly latest constraints, not those
-        # Cached in the image this way
+        # Cached in the image if we set it to "true". This upgrade_to_newer_dependencies variable
+        # can later be overridden in case we find that setup.* files changed (see below)
         upgrade_to_newer_dependencies="${INCOMING_COMMIT_SHA}"
     fi
 }
@@ -338,6 +340,8 @@ function check_if_setup_files_changed() {
     show_changed_files
 
     if [[ $(count_changed_files) != "0" ]]; then
+        # In case the setup files changed, we automatically force upgrading to newer dependencies
+        # no matter what was set before
         upgrade_to_newer_dependencies="${INCOMING_COMMIT_SHA}"
     fi
     start_end::group_end
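
The computed upgrade_to_newer_dependencies value is ultimately fed to the UPGRADE_TO_NEWER_DEPENDENCIES
build argument, so using the merge-commit SHA instead of a constant "true" invalidates the corresponding
Docker layer on every such build. A rough sketch of the effect (the image tag and the SHA placeholder
are hypothetical):

    # Sketch: a changing build-arg value busts the cached layer that resolves dependencies.
    docker build . -f Dockerfile.ci \
        --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="<merge-commit-sha>" \
        --tag example-airflow-ci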
diff --git a/scripts/ci/tools/fix_ownership.sh b/scripts/ci/tools/fix_ownership.sh
index 6ed1161..de15621 100755
--- a/scripts/ci/tools/fix_ownership.sh
+++ b/scripts/ci/tools/fix_ownership.sh
@@ -33,8 +33,12 @@ sanity_checks::sanitize_mounted_files
 
 read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)"
 
-docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
-    --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
-    "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
+if docker image inspect "${AIRFLOW_CI_IMAGE}" >/dev/null 2>&1; then
+    docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
+        --rm \
+        --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
+        "${AIRFLOW_CI_IMAGE}" \
+        -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
+else
+    echo "Skip fixing ownership as seems that you do not have the ${AIRFLOW_CI_IMAGE} image yet"
+fi
diff --git a/scripts/docker/install_additional_dependencies.sh b/scripts/docker/install_additional_dependencies.sh
index 6c035ae..4f9c05f 100755
--- a/scripts/docker/install_additional_dependencies.sh
+++ b/scripts/docker/install_additional_dependencies.sh
@@ -23,7 +23,6 @@ test -v ADDITIONAL_PYTHON_DEPS
 test -v EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS
 test -v AIRFLOW_INSTALL_USER_FLAG
 test -v AIRFLOW_PIP_VERSION
-test -v CONTINUE_ON_PIP_CHECK_FAILURE
 
 # shellcheck source=scripts/docker/common.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
@@ -41,7 +40,7 @@ function install_additional_dependencies() {
             ${ADDITIONAL_PYTHON_DEPS} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS}
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-        pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
+        pip check
     else
         echo
         echo Installing additional dependencies upgrading only if needed
@@ -51,7 +50,7 @@ function install_additional_dependencies() {
             ${ADDITIONAL_PYTHON_DEPS}
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-        pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
+        pip check
     fi
 }
 
diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh
index 4904027..e2bca4f 100755
--- a/scripts/docker/install_airflow.sh
+++ b/scripts/docker/install_airflow.sh
@@ -60,7 +60,7 @@ function install_airflow() {
 
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-        pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
+        pip check
     else \
         echo
         echo Installing all packages with constraints and upgrade if needed
@@ -76,7 +76,7 @@ function install_airflow() {
             "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-        pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
+        pip check
     fi
 
 }
diff --git a/scripts/docker/install_airflow_from_branch_tip.sh b/scripts/docker/install_airflow_dependencies_from_branch_tip.sh
similarity index 85%
rename from scripts/docker/install_airflow_from_branch_tip.sh
rename to scripts/docker/install_airflow_dependencies_from_branch_tip.sh
index 925a872..61aaa13 100755
--- a/scripts/docker/install_airflow_from_branch_tip.sh
+++ b/scripts/docker/install_airflow_dependencies_from_branch_tip.sh
@@ -30,28 +30,29 @@
 . "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
 
 
-function install_airflow_from_branch_tip() {
+function install_airflow_dependencies_from_branch_tip() {
     echo
     echo "Installing airflow from ${AIRFLOW_BRANCH}. It is used to cache dependencies"
     echo
     if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
     fi
-    # Install latest set of dependencies using constraints
+    # Install latest set of dependencies using constraints. In case constraints were upgraded and there
+    # are conflicts, this might fail, but it should be fixed in the following installation steps
     pip install ${AIRFLOW_INSTALL_USER_FLAG} \
       "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
-      --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"
+      --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}" || true
     # make sure correct PIP version is used
     pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
     pip freeze | grep apache-airflow-providers | xargs pip uninstall --yes || true
     echo
     echo Uninstalling just airflow. Dependencies remain.
     echo
-    pip uninstall --yes apache-airflow
+    pip uninstall --yes apache-airflow || true
 }
 
 common::get_airflow_version_specification
 common::override_pip_version_if_needed
 common::get_constraints_location
 
-install_airflow_from_branch_tip
+install_airflow_dependencies_from_branch_tip
diff --git a/scripts/docker/install_from_docker_context_files.sh b/scripts/docker/install_from_docker_context_files.sh
index 813d1b0..d8ed6bc 100755
--- a/scripts/docker/install_from_docker_context_files.sh
+++ b/scripts/docker/install_from_docker_context_files.sh
@@ -96,7 +96,7 @@ function install_airflow_and_providers_from_docker_context_files(){
 
     # make sure correct PIP version is left installed
     pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-    pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
+    pip check
 
 }
 

[airflow] 13/22: Moves SchedulerJob initialization to within daemon context (#17157)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ca679720653242d3c656d41fb93d61a5c7c79bb8
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sun Aug 1 20:45:01 2021 +0200

    Moves SchedulerJob initialization to within daemon context (#17157)
    
    In the Scheduler, the SchedulerJob was instantiated before the daemon context
    was activated. SchedulerJob is a SQLAlchemy ORM object, and instantiating it
    opens a connection to Postgres.
    
    When you activate the daemon context, the process is forked under the hood,
    and while some of the open file handles were passed on to the fork
    (stdin and stderr, but also the open log file handle), the
    established socket for the DB connection was not.
    
    As a result, when the scheduler was started with the --daemonize flag,
    the error `SSL SYSCALL error: Socket operation on non-socket` was
    raised.
    
    The PR moves the SchedulerJob initialization to within the daemon context,
    so that the connection to Postgres is initialized after the
    process has been forked and daemonized.
    
    Fixes: #17120
    (cherry picked from commit e8fc3acfd9884312669c1d85b71f42a9aab29cf8)
---
 airflow/cli/commands/scheduler_command.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/airflow/cli/commands/scheduler_command.py b/airflow/cli/commands/scheduler_command.py
index 368db6f..44674f0 100644
--- a/airflow/cli/commands/scheduler_command.py
+++ b/airflow/cli/commands/scheduler_command.py
@@ -29,17 +29,21 @@ from airflow.utils import cli as cli_utils
 from airflow.utils.cli import process_subdir, setup_locations, setup_logging, sigint_handler, sigquit_handler
 
 
+def _create_scheduler_job(args):
+    job = SchedulerJob(
+        subdir=process_subdir(args.subdir),
+        num_runs=args.num_runs,
+        do_pickle=args.do_pickle,
+    )
+    return job
+
+
 @cli_utils.action_logging
 def scheduler(args):
     """Starts Airflow Scheduler"""
     skip_serve_logs = args.skip_serve_logs
 
     print(settings.HEADER)
-    job = SchedulerJob(
-        subdir=process_subdir(args.subdir),
-        num_runs=args.num_runs,
-        do_pickle=args.do_pickle,
-    )
 
     if args.daemon:
         pid, stdout, stderr, log_file = setup_locations(
@@ -54,9 +58,11 @@ def scheduler(args):
                 stderr=stderr_handle,
             )
             with ctx:
+                job = _create_scheduler_job(args)
                 sub_proc = _serve_logs(skip_serve_logs)
                 job.run()
     else:
+        job = _create_scheduler_job(args)
         signal.signal(signal.SIGINT, sigint_handler)
         signal.signal(signal.SIGTERM, sigint_handler)
         signal.signal(signal.SIGQUIT, sigquit_handler)
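
For illustration, a minimal sketch of the pattern described above - it assumes
the python-daemon package and uses a hypothetical make_db_backed_job() helper
as a stand-in for SchedulerJob; it is not part of the commit:

    import daemon

    def make_db_backed_job():
        # Hypothetical helper: creating the object opens a socket to the
        # database as a side effect, just like instantiating SchedulerJob.
        ...

    ctx = daemon.DaemonContext()
    with ctx:
        # The process has already been forked and daemonized here, so the DB
        # socket opened now belongs to the daemonized process.
        job = make_db_backed_job()

    # Creating the job *before* entering the context would open the socket in
    # the parent process; after daemonization that descriptor is no longer
    # usable, which surfaces as "SSL SYSCALL error: Socket operation on
    # non-socket".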

[airflow] 05/22: Remove support for Airflow 1.10 cmds in entrypoint_prod.sh (#17248)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 55dccc704f37d1d146e6273966d7ada48e901a92
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Jul 27 14:45:07 2021 +0200

    Remove support for Airflow 1.10 cmds in entrypoint_prod.sh (#17248)
    
    (cherry picked from commit 00ffae851b17b779581944734a9bf03bf8f63efc)
---
 docs/docker-stack/entrypoint.rst             | 2 +-
 scripts/in_container/prod/entrypoint_prod.sh | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index dad13f4..a999892 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -105,7 +105,7 @@ To disable check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
 Waits for celery broker connection
 ----------------------------------
 
-In case CeleryExecutor is used, and one of the ``scheduler``, ``celery``, ``worker``, or ``flower``
+In case CeleryExecutor is used, and one of the ``scheduler``, ``celery``
 commands are used the entrypoint will wait until the celery broker DB connection is available.
 
 The script detects backend type depending on the URL schema and assigns default port numbers if not specified
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index d4a2e8e..674b569 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -319,7 +319,7 @@ if [[ ${AIRFLOW_COMMAND} == "airflow" ]]; then
 fi
 
 # Note: the broker backend configuration concerns only a subset of Airflow components
-if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]] \
+if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery)$ ]] \
     && [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
     wait_for_celery_broker
 fi

[airflow] 17/22: Grammar and clarity pass on documentation (#17318)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 67f04d205ea8ae5e91f521548ce7d84ace2b2e6c
Author: Collin McNulty <co...@gmail.com>
AuthorDate: Thu Jul 29 10:48:27 2021 -0500

    Grammar and clarity pass on documentation (#17318)
    
    Minor grammar edits, fixes to broken links, and rewording for clarification.
    
    There are a few changes that others may disagree with me about:
    - Changed "outwith" to "instead of"
    - All non-code references I found to "time-zone" or "timezone" changed to "time zone"
    - It seems like top level pages are supposed to have capitalized words other than articles and prepositions, but two pages were not following this convention. I have changed them to conform to the others.
    - I found a sentence in the health checks section extremely confusing. I took my best attempt to restate it clearly, but I'm not sure I understood it well enough to restate it correctly.
    
    (cherry picked from commit 7b10b56a67ff935f28197e1916972b29398f667e)
---
 docs/apache-airflow/index.rst                        |  2 +-
 .../logging-monitoring/check-health.rst              |  8 ++++++--
 docs/apache-airflow/logging-monitoring/errors.rst    |  4 ++--
 docs/apache-airflow/logging-monitoring/metrics.rst   |  6 +++---
 docs/apache-airflow/start/index.rst                  |  2 +-
 docs/apache-airflow/timezone.rst                     | 20 ++++++++++----------
 6 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/docs/apache-airflow/index.rst b/docs/apache-airflow/index.rst
index ba9db5d..18b2f7a 100644
--- a/docs/apache-airflow/index.rst
+++ b/docs/apache-airflow/index.rst
@@ -97,7 +97,7 @@ unit of work and continuity.
     lineage
     dag-serialization
     modules_management
-    Release policies <release-process>
+    Release Policies <release-process>
     changelog
     best-practices
     production-deployment
diff --git a/docs/apache-airflow/logging-monitoring/check-health.rst b/docs/apache-airflow/logging-monitoring/check-health.rst
index a5f8664..4468deb 100644
--- a/docs/apache-airflow/logging-monitoring/check-health.rst
+++ b/docs/apache-airflow/logging-monitoring/check-health.rst
@@ -20,9 +20,13 @@
 Checking Airflow Health Status
 ==============================
 
-Airflow has two methods to check the health of components - HTTP checks and CLI checks. Their choice depends on the role of the component as well as what tools it uses to monitor the deployment.
+Airflow has two methods to check the health of components - HTTP checks and CLI checks. All available checks are
+accessible through the CLI, but only some are accessible through HTTP due to the role of the component being checked
+and the tools being used to monitor the deployment.
 
-For example, when running on Kubernetes, use `a Liveness probes <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>`__ (``livenessProbe`` property) with :ref:`CLI checks <check-health/cli-checks-for-scheduler>` on the scheduler deployment to restart it when it fail. For the webserver, you can configure the readiness probe (``readinessProbe`` property) using :ref:`check-health/http-endpoint`.
+For example, when running on Kubernetes, use `a Liveness probes <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>`__ (``livenessProbe`` property)
+with :ref:`CLI checks <check-health/cli-checks-for-scheduler>` on the scheduler deployment to restart it when it fails.
+For the webserver, you can configure the readiness probe (``readinessProbe`` property) using :ref:`check-health/http-endpoint`.
 
 For an example for a Docker Compose environment, see the ``docker-compose.yaml`` file available in the :doc:`/start/docker`.
 
diff --git a/docs/apache-airflow/logging-monitoring/errors.rst b/docs/apache-airflow/logging-monitoring/errors.rst
index 37ed307..578666b 100644
--- a/docs/apache-airflow/logging-monitoring/errors.rst
+++ b/docs/apache-airflow/logging-monitoring/errors.rst
@@ -41,7 +41,7 @@ Add your ``SENTRY_DSN`` to your configuration file e.g. ``airflow.cfg`` in ``[se
 .. note::
     If this value is not provided, the SDK will try to read it from the ``SENTRY_DSN`` environment variable.
 
-You can supply `additional configuration options <https://docs.sentry.io/error-reporting/configuration/?platform=python>`__ based on the Python platform via ``[sentry]`` section.
+You can supply `additional configuration options <https://docs.sentry.io/platforms/python/configuration/options>`__ based on the Python platform via ``[sentry]`` section.
 Unsupported options: ``integrations``, ``in_app_include``, ``in_app_exclude``, ``ignore_errors``, ``before_breadcrumb``, ``before_send``, ``transport``.
 
 Tags
@@ -60,7 +60,7 @@ Breadcrumbs
 ------------
 
 
-When a task fails with an error `breadcrumbs <https://docs.sentry.io/enriching-error-data/breadcrumbs/?platform=python>`__ will be added for the other tasks in the current dag run.
+When a task fails with an error `breadcrumbs <https://docs.sentry.io/platforms/python/enriching-events/breadcrumbs/>`__ will be added for the other tasks in the current dag run.
 
 ======================================= ==============================================================
 Name                                    Description
diff --git a/docs/apache-airflow/logging-monitoring/metrics.rst b/docs/apache-airflow/logging-monitoring/metrics.rst
index d410261..b55ad92 100644
--- a/docs/apache-airflow/logging-monitoring/metrics.rst
+++ b/docs/apache-airflow/logging-monitoring/metrics.rst
@@ -50,15 +50,15 @@ the metrics that start with the elements of the list:
     statsd_allow_list = scheduler,executor,dagrun
 
 If you want to redirect metrics to different name, you can configure ``stat_name_handler`` option
-in ``[scheduler]`` section.  It should point to a function that validate the statsd stat name, apply changes
-to the stat name if necessary and return the transformed stat name. The function may looks as follow:
+in ``[scheduler]`` section.  It should point to a function that validates the statsd stat name, applies changes
+to the stat name if necessary, and returns the transformed stat name. The function may looks as follow:
 
 .. code-block:: python
 
     def my_custom_stat_name_handler(stat_name: str) -> str:
         return stat_name.lower()[:32]
 
-If you want to use a custom Statsd client outwith the default one provided by Airflow the following key must be added
+If you want to use a custom Statsd client instead of the default one provided by Airflow, the following key must be added
 to the configuration file alongside the module path of your custom Statsd client. This module must be available on
 your :envvar:`PYTHONPATH`.
 
diff --git a/docs/apache-airflow/start/index.rst b/docs/apache-airflow/start/index.rst
index c86ef83..b8f0c0b 100644
--- a/docs/apache-airflow/start/index.rst
+++ b/docs/apache-airflow/start/index.rst
@@ -15,7 +15,7 @@
     specific language governing permissions and limitations
     under the License.
 
-Quick start
+Quick Start
 ===========
 
 This section contains quick start guides to help you get up and running with Apache Airflow.
diff --git a/docs/apache-airflow/timezone.rst b/docs/apache-airflow/timezone.rst
index 63d1cec..d543c60 100644
--- a/docs/apache-airflow/timezone.rst
+++ b/docs/apache-airflow/timezone.rst
@@ -17,22 +17,22 @@
 
 
 
-Time zones
+Time Zones
 ==========
 
 Support for time zones is enabled by default. Airflow stores datetime information in UTC internally and in the database.
-It allows you to run your DAGs with time zone dependent schedules. At the moment Airflow does not convert them to the
-end user’s time zone in the user interface. There it will always be displayed in UTC. Also templates used in Operators
-are not converted. Time zone information is exposed and it is up to the writer of DAG what do with it.
+It allows you to run your DAGs with time zone dependent schedules. At the moment, Airflow does not convert them to the
+end user’s time zone in the user interface. It will always be displayed in UTC there. Also, templates used in Operators
+are not converted. Time zone information is exposed and it is up to the writer of DAG to decide what do with it.
 
 This is handy if your users live in more than one time zone and you want to display datetime information according to
 each user’s wall clock.
 
-Even if you are running Airflow in only one time zone it is still good practice to store data in UTC in your database
-(also before Airflow became time zone aware this was also to recommended or even required setup). The main reason is
-Daylight Saving Time (DST). Many countries have a system of DST, where clocks are moved forward in spring and backward
+Even if you are running Airflow in only one time zone, it is still good practice to store data in UTC in your database
+(also before Airflow became time zone aware this was also the recommended or even required setup). The main reason is
+that many countries use Daylight Saving Time (DST), where clocks are moved forward in spring and backward
 in autumn. If you’re working in local time, you’re likely to encounter errors twice a year, when the transitions
-happen. (The pendulum and pytz documentation discusses these issues in greater detail.) This probably doesn’t matter
+happen. (The pendulum and pytz documentation discuss these issues in greater detail.) This probably doesn’t matter
 for a simple DAG, but it’s a problem if you are in, for example, financial services where you have end of day
 deadlines to meet.
 
@@ -68,7 +68,7 @@ a datetime object is aware. Otherwise, it’s naive.
 
 You can use ``timezone.is_localized()`` and ``timezone.is_naive()`` to determine whether datetimes are aware or naive.
 
-Because Airflow uses time-zone-aware datetime objects. If your code creates datetime objects they need to be aware too.
+Because Airflow uses time zone aware datetime objects. If your code creates datetime objects they need to be aware too.
 
 .. code-block:: python
 
@@ -103,7 +103,7 @@ Unfortunately, during DST transitions, some datetimes don’t exist or are ambig
 In such situations, pendulum raises an exception. That’s why you should always create aware
 datetime objects when time zone support is enabled.
 
-In practice, this is rarely an issue. Airflow gives you aware datetime objects in the models and DAGs, and most often,
+In practice, this is rarely an issue. Airflow gives you time zone aware datetime objects in the models and DAGs, and most often,
 new datetime objects are created from existing ones through timedelta arithmetic. The only datetime that’s often
 created in application code is the current time, and ``timezone.utcnow()`` automatically does the right thing.
 
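
As a short, hedged illustration of the guidance above (assuming a standard
Airflow 2 installation where airflow.utils.timezone and pendulum are
available):

    import pendulum
    from airflow.utils import timezone

    # timezone.utcnow() returns a time zone aware datetime in UTC.
    now = timezone.utcnow()
    print(timezone.is_localized(now))  # True
    print(timezone.is_naive(now))      # False

    # When creating datetimes yourself (e.g. for start_date), attach a time
    # zone explicitly instead of building a naive datetime.
    start_date = pendulum.datetime(2021, 8, 1, tz="Europe/Amsterdam")
    print(timezone.is_localized(start_date))  # True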

[airflow] 12/22: Adds compile_assets to INSTALL (#17377)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit cc51e1fda4bd7d2cedc756435c1a1ac7425b97e5
Author: Vishal Gupta <vi...@gmail.com>
AuthorDate: Mon Aug 2 16:17:52 2021 +0530

    Adds compile_assets to INSTALL (#17377)
    
    (cherry picked from commit 0e6e04e5f80eaf186d28ac62d4178e971ccf32bc)
---
 INSTALL | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/INSTALL b/INSTALL
index ced87c9..111b51f 100644
--- a/INSTALL
+++ b/INSTALL
@@ -104,3 +104,8 @@ telegram, trino, vertica, virtualenv, webhdfs, winrm, yandex, zendesk
 # END EXTRAS HERE
 
 # For installing Airflow in development environments - see CONTRIBUTING.rst
+
+# COMPILING FRONT-END ASSETS (in case you see "Please make sure to build the frontend in static/ directory and then restart the server")
+# Optional : Installing yarn - https://classic.yarnpkg.com/en/docs/install
+
+python setup.py compile_assets

[airflow] 10/22: Fix typo in build_images (#17327)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0582ccec0b3c967aaa9125f6667d69a5cacd8d8d
Author: Jed Cunningham <66...@users.noreply.github.com>
AuthorDate: Thu Jul 29 22:24:50 2021 -0600

    Fix typo in build_images (#17327)
    
    (cherry picked from commit 78506e3e94f70ddcb50a8290fcce9c209515bbaf)
---
 scripts/ci/libraries/_build_images.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 94f5c8e..ca94e4c 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -193,7 +193,7 @@ function build_images::confirm_image_rebuild() {
         echo  "${COLOR_RED}ERROR: The ${THE_IMAGE_TYPE} needs to be rebuilt - it is outdated.   ${COLOR_RESET}"
         echo """
 
-   Make sure you build the images bu running
+   Make sure you build the images by running:
 
       ./breeze --python ${PYTHON_MAJOR_MINOR_VERSION} build-image
 

[airflow] 16/22: docs: fix inconsistencies in configuration docs (#17317)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7ba20f5cb019f4e37292dacdce9561d12b83312a
Author: mgorsk1 <go...@gmail.com>
AuthorDate: Thu Jul 29 21:36:57 2021 +0200

    docs: fix inconsistencies in configuration docs (#17317)
    
    (cherry picked from commit 2020a544c8208c8c3c9763cf0dbb6b2e1a145727)
---
 airflow/config_templates/config.yml          | 14 ++++----------
 airflow/config_templates/default_airflow.cfg | 14 ++++----------
 2 files changed, 8 insertions(+), 20 deletions(-)

diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml
index 9f7c640..9945213 100644
--- a/airflow/config_templates/config.yml
+++ b/airflow/config_templates/config.yml
@@ -1819,36 +1819,30 @@
       default: "True"
     - name: max_dagruns_to_create_per_loop
       description: |
-        Max number of DAGs to create DagRuns for per scheduler loop
-
-        Default: 10
+        Max number of DAGs to create DagRuns for per scheduler loop.
       example: ~
       version_added: 2.0.0
       type: string
-      default: ~
+      default: "10"
       see_also: ":ref:`scheduler:ha:tunables`"
     - name: max_dagruns_per_loop_to_schedule
       description: |
         How many DagRuns should a scheduler examine (and lock) when scheduling
         and queuing tasks.
-
-        Default: 20
       example: ~
       version_added: 2.0.0
       type: string
-      default: ~
+      default: "20"
       see_also: ":ref:`scheduler:ha:tunables`"
     - name: schedule_after_task_execution
       description: |
         Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the
         same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other
         dags in some circumstances
-
-        Default: True
       example: ~
       version_added: 2.0.0
       type: boolean
-      default: ~
+      default: "True"
     - name: parsing_processes
       description: |
         The scheduler can run multiple processes in parallel to parse dags.
diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg
index 1cdf8b4..03d5e1f 100644
--- a/airflow/config_templates/default_airflow.cfg
+++ b/airflow/config_templates/default_airflow.cfg
@@ -909,23 +909,17 @@ max_tis_per_query = 512
 # scheduler at once
 use_row_level_locking = True
 
-# Max number of DAGs to create DagRuns for per scheduler loop
-#
-# Default: 10
-# max_dagruns_to_create_per_loop =
+# Max number of DAGs to create DagRuns for per scheduler loop.
+max_dagruns_to_create_per_loop = 10
 
 # How many DagRuns should a scheduler examine (and lock) when scheduling
 # and queuing tasks.
-#
-# Default: 20
-# max_dagruns_per_loop_to_schedule =
+max_dagruns_per_loop_to_schedule = 20
 
 # Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the
 # same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other
 # dags in some circumstances
-#
-# Default: True
-# schedule_after_task_execution =
+schedule_after_task_execution = True
 
 # The scheduler can run multiple processes in parallel to parse dags.
 # This defines how many processes will run.
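
For illustration, a small hedged sketch of how these scheduler settings can be
read at runtime (the printed values assume the defaults above are not
overridden in airflow.cfg or via environment variables):

    from airflow.configuration import conf

    print(conf.getint("scheduler", "max_dagruns_to_create_per_loop"))     # 10
    print(conf.getint("scheduler", "max_dagruns_per_loop_to_schedule"))   # 20
    print(conf.getboolean("scheduler", "schedule_after_task_execution"))  # True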

[airflow] 14/22: Update best-practices.rst (#17357)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 358a1cca3fe4f514c57a2627dbff9855f473de3f
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sat Jul 31 13:00:49 2021 +0200

    Update best-practices.rst (#17357)
    
    (cherry picked from commit d2a43f372962581aff5825c1c834cc6298b736be)
---
 docs/apache-airflow/best-practices.rst | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/apache-airflow/best-practices.rst b/docs/apache-airflow/best-practices.rst
index b2ae4ae..9e89865 100644
--- a/docs/apache-airflow/best-practices.rst
+++ b/docs/apache-airflow/best-practices.rst
@@ -62,7 +62,9 @@ Some of the ways you can avoid producing a different result -
 .. tip::
 
     You should define repetitive parameters such as ``connection_id`` or S3 paths in ``default_args`` rather than declaring them for each task.
-    The ``default_args`` help to avoid mistakes such as typographical errors.
+    The ``default_args`` help to avoid mistakes such as typographical errors. Also, most connection types have unique parameter names in
+    tasks, so you can declare a connection only once in ``default_args`` (for example ``gcp_conn_id``) and it is automatically
+    used by all operators that use this connection type.
 
 Deleting a task
 ----------------
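
For illustration, a minimal hedged sketch of declaring shared parameters once
in default_args (the dag_id, owner and retries values are made up; a Google
provider operator would pick up an added "gcp_conn_id" entry the same way):

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.bash import BashOperator

    # Declared once here, these values apply to every task in the DAG that
    # accepts the corresponding argument.
    default_args = {
        "owner": "data-team",
        "retries": 1,
    }

    with DAG(
        dag_id="example_default_args",
        start_date=datetime(2021, 8, 1),
        schedule_interval=None,
        default_args=default_args,
    ) as dag:
        BashOperator(task_id="hello", bash_command="echo hello")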

[airflow] 20/22: Doc: Strip unnecessary arguments from MariaDB JIRA URL (#17296)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit bb59cc5251f5c825ec3d3c1be6ba80a712d3b74f
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Jul 28 21:26:24 2021 +0100

    Doc: Strip unnecessary arguments from MariaDB JIRA URL (#17296)
    
    This was included in https://github.com/apache/airflow/pull/17287, but we can strip other args as well.
    
    (cherry picked from commit 8e76f3ca50c736629f8ad3d3b7d4851fb4bd90e0)
---
 docs/apache-airflow/concepts/scheduler.rst | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/docs/apache-airflow/concepts/scheduler.rst b/docs/apache-airflow/concepts/scheduler.rst
index 6ea5ff2..0a1079e 100644
--- a/docs/apache-airflow/concepts/scheduler.rst
+++ b/docs/apache-airflow/concepts/scheduler.rst
@@ -123,13 +123,14 @@ The following databases are fully supported and provide an "optimal" experience:
 
 .. warning::
 
-  MariaDB does not implement the ``SKIP LOCKED`` or ``NOWAIT`` SQL clauses (see `MDEV-13115
-  <https://jira.mariadb.org/browse/MDEV-13115>`_). Without these features running multiple schedulers is not
-  supported and deadlock errors have been reported.
+  MariaDB did not implement the ``SKIP LOCKED`` or ``NOWAIT`` SQL clauses until version
+  `10.6.0 <https://jira.mariadb.org/browse/MDEV-25433>`_.
+  Without these features, running multiple schedulers is not supported and deadlock errors have been reported. MariaDB
+  10.6.0 and following may work appropriately with multiple schedulers, but this has not been tested.
 
 .. warning::
 
-  MySQL 5.x also does not support ``SKIP LOCKED`` or ``NOWAIT``, and additionally is more prone to deciding
+  MySQL 5.x does not support ``SKIP LOCKED`` or ``NOWAIT``, and additionally is more prone to deciding
   queries are deadlocked, so running with more than a single scheduler on MySQL 5.x is not supported or
   recommended.
 

[airflow] 09/22: Fix breeze kind-cluster deploy failing with ECONREFUSED (#17293)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 029a709220adf641e8574919f0307d1d26eb06ea
Author: Ephraim Anierobi <sp...@gmail.com>
AuthorDate: Thu Jul 29 02:13:50 2021 +0100

    Fix breeze kind-cluster deploy failing with ECONREFUSED (#17293)
    
    Currently, kind-cluster deploy fails occasionally during yarn install when compiling
    assets. This PR fixes it by using the recommended option --network-concurrency=1 when
    running yarn install.
    
    (cherry picked from commit 53e9349d68a6c3000139417e6df2271f3d589ebc)
---
 scripts/docker/compile_www_assets.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/docker/compile_www_assets.sh b/scripts/docker/compile_www_assets.sh
index 59a7017..50e1318 100755
--- a/scripts/docker/compile_www_assets.sh
+++ b/scripts/docker/compile_www_assets.sh
@@ -35,7 +35,7 @@ function compile_www_assets() {
         www_dir="$(python -m site --user-site)/airflow/www"
     fi
     pushd ${www_dir} || exit 1
-    yarn install --frozen-lockfile --no-cache
+    yarn install --frozen-lockfile --no-cache --network-concurrency=1
     yarn run prod
     find package.json yarn.lock static/css static/js -type f | sort | xargs md5sum > "${md5sum_file}"
     rm -rf "${www_dir}/node_modules"

[airflow] 11/22: Added print statements for clarity in provider yaml checks (#17322)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 9a09b29dd74adea8e1fbf9d2a7c500cb2b852a9e
Author: Kanthi <su...@gmail.com>
AuthorDate: Fri Jul 30 15:18:26 2021 -0400

    Added print statements for clarity in provider yaml checks (#17322)
    
    (cherry picked from commit 76e6315473671b87f3d5fe64e4c35a79658789d3)
---
 .../ci/pre_commit/pre_commit_check_provider_yaml_files.py  | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
index 24d963b..c6c0584 100755
--- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
+++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
@@ -119,13 +119,13 @@ def assert_sets_equal(set1, set2):
 
     lines = []
     if difference1:
-        lines.append('Items in the first set but not the second:')
+        lines.append('    -- Items in the left set but not the right:')
         for item in sorted(difference1):
-            lines.append(repr(item))
+            lines.append(f'       {item!r}')
     if difference2:
-        lines.append('Items in the second set but not the first:')
+        lines.append('    -- Items in the right set but not the left:')
         for item in sorted(difference2):
-            lines.append(repr(item))
+            lines.append(f'       {item!r}')
 
     standard_msg = '\n'.join(lines)
     raise AssertionError(standard_msg)
@@ -155,6 +155,7 @@ def parse_module_data(provider_data, resource_type, yaml_file_path):
 
 def check_completeness_of_list_of_hooks_sensors_hooks(yaml_files: Dict[str, Dict]):
     print("Checking completeness of list of {sensors, hooks, operators}")
+    print(" -- {sensors, hooks, operators} - Expected modules(Left): Current Modules(Right)")
     for (yaml_file_path, provider_data), resource_type in product(
         yaml_files.items(), ["sensors", "operators", "hooks"]
     ):
@@ -193,6 +194,8 @@ def check_duplicates_in_integrations_names_of_hooks_sensors_operators(yaml_files
 def check_completeness_of_list_of_transfers(yaml_files: Dict[str, Dict]):
     print("Checking completeness of list of transfers")
     resource_type = 'transfers'
+
+    print(" -- Expected transfers modules(Left): Current transfers Modules(Right)")
     for yaml_file_path, provider_data in yaml_files.items():
         expected_modules, provider_package, resource_data = parse_module_data(
             provider_data, resource_type, yaml_file_path
@@ -309,7 +312,10 @@ def check_doc_files(yaml_files: Dict[str, Dict]):
     }
 
     try:
+        print(" -- Checking document urls: expected(left), current(right)")
         assert_sets_equal(set(expected_doc_urls), set(current_doc_urls))
+
+        print(" -- Checking logo urls: expected(left), current(right)")
         assert_sets_equal(set(expected_logo_urls), set(current_logo_urls))
     except AssertionError as ex:
         print(ex)

[airflow] 01/22: Support secret backends/airflow.cfg for celery broker in entrypoint_prod.sh (#17069)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 696352baa10e093d6350d6d8d40137a3d65c31a7
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Mon Jul 19 14:46:18 2021 +0200

    Support secret backends/airflow.cfg for celery broker in entrypoint_prod.sh (#17069)
    
    * Support secret backends in entrypoint_prod.sh
    
    * Update entrypoint_prod.sh
    
    (cherry picked from commit c1ff75efe5a2b5638148c4d0359cdb1a83b67806)
---
 docs/docker-stack/entrypoint.rst             |  9 ++-------
 scripts/in_container/prod/entrypoint_prod.sh | 17 ++++++++---------
 2 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index 0a0de5d..dad13f4 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -105,7 +105,7 @@ To disable check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
 Waits for celery broker connection
 ----------------------------------
 
-In case Postgres or MySQL DB is used, and one of the ``scheduler``, ``celery``, ``worker``, or ``flower``
+In case CeleryExecutor is used, and one of the ``scheduler``, ``celery``, ``worker``, or ``flower``
 commands are used the entrypoint will wait until the celery broker DB connection is available.
 
 The script detects backend type depending on the URL schema and assigns default port numbers if not specified
@@ -120,12 +120,7 @@ Supported schemes:
 * ``postgres://``            - default port 5432
 * ``mysql://``               - default port 3306
 
-Waiting for connection involves checking if a matching port is open.
-The host information is derived from the variables :envvar:`AIRFLOW__CELERY__BROKER_URL` and
-:envvar:`AIRFLOW__CELERY__BROKER_URL_CMD`. If :envvar:`AIRFLOW__CELERY__BROKER_URL_CMD` variable
-is passed to the container, it is evaluated as a command to execute and result of this evaluation is used
-as :envvar:`AIRFLOW__CELERY__BROKER_URL`. The :envvar:`AIRFLOW__CELERY__BROKER_URL_CMD` variable
-takes precedence over the :envvar:`AIRFLOW__CELERY__BROKER_URL` variable.
+Waiting for connection involves checking if a matching port is open. The host information is derived from the Airflow configuration.
 
 .. _entrypoint:commands:
 
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index 9d8e74a..d4a2e8e 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -195,15 +195,14 @@ function upgrade_db() {
     airflow db upgrade || true
 }
 
-function wait_for_celery_backend() {
+function wait_for_celery_broker() {
     # Verifies connection to Celery Broker
-    if [[ -n "${AIRFLOW__CELERY__BROKER_URL_CMD=}" ]]; then
-        wait_for_connection "$(eval "${AIRFLOW__CELERY__BROKER_URL_CMD}")"
-    else
-        AIRFLOW__CELERY__BROKER_URL=${AIRFLOW__CELERY__BROKER_URL:=}
-        if [[ -n ${AIRFLOW__CELERY__BROKER_URL=} ]]; then
-            wait_for_connection "${AIRFLOW__CELERY__BROKER_URL}"
-        fi
+    local executor
+    executor="$(airflow config get-value core executor)"
+    if [[ "${executor}" == "CeleryExecutor" ]]; then
+        local connection_url
+        connection_url="$(airflow config get-value celery broker_url)"
+        wait_for_connection "${connection_url}"
     fi
 }
 
@@ -322,7 +321,7 @@ fi
 # Note: the broker backend configuration concerns only a subset of Airflow components
 if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]] \
     && [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
-    wait_for_celery_backend
+    wait_for_celery_broker
 fi
 
 exec "airflow" "${@}"

[airflow] 15/22: Fix link (#17351)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f70ef6d808007e2c99b2db9a9e350f3773bfa2f7
Author: Attila Tóth <at...@gmail.com>
AuthorDate: Sat Jul 31 09:10:06 2021 +0200

    Fix link (#17351)
    
    (cherry picked from commit 07fc4508fd8d3f4811029b14ee5813b8635fb312)
---
 docs/apache-airflow/start/docker.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/apache-airflow/start/docker.rst b/docs/apache-airflow/start/docker.rst
index 32bf4c2..77c9333 100644
--- a/docs/apache-airflow/start/docker.rst
+++ b/docs/apache-airflow/start/docker.rst
@@ -80,7 +80,7 @@ On **Linux**, the mounted volumes in container use the native Linux filesystem u
     mkdir ./dags ./logs ./plugins
     echo -e "AIRFLOW_UID=$(id -u)\nAIRFLOW_GID=0" > .env
 
-See:ref:`Docker Compose environment variables <docker-compose-env-variables>`
+See :ref:`Docker Compose environment variables <docker-compose-env-variables>`
 
 On **all operating systems**, you need to run database migrations and create the first user account. To do it, run.
 

[airflow] 21/22: Fix typo in webserver.rst (#17288)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit d415256cd0ac3d300a9d541e2ca67a274f44a5da
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Jul 28 17:37:14 2021 +0100

    Fix typo in webserver.rst (#17288)
    
    Porting https://github.com/apache/airflow-site/pull/454 to Airflow docs. Added @pumpkiny9120 as co-author.
    
    Co-authored-by: Yanan Valencia <pu...@gmail.com>
    (cherry picked from commit 642d05be4299e86310eb1b61a9991bad17e7d95f)
---
 docs/apache-airflow/security/webserver.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/apache-airflow/security/webserver.rst b/docs/apache-airflow/security/webserver.rst
index 5fb03c5..3906abf 100644
--- a/docs/apache-airflow/security/webserver.rst
+++ b/docs/apache-airflow/security/webserver.rst
@@ -90,7 +90,7 @@ Other Methods
 '''''''''''''
 
 Since the Airflow 2.0, the default UI is the Flask App Builder RBAC. A ``webserver_config.py`` configuration file
-it's automatically generated and can be used to configure the Airflow to support authentication
+is automatically generated and can be used to configure the Airflow to support authentication
 methods like OAuth, OpenID, LDAP, REMOTE_USER.
 
 For previous versions from Airflow, the ``$AIRFLOW_HOME/airflow.cfg`` following entry needs to be set to enable
@@ -107,7 +107,7 @@ with the following entry in the ``$AIRFLOW_HOME/webserver_config.py``.
 
     AUTH_TYPE = AUTH_DB
 
-Another way to create users it's in the UI login page, allowing user self registration through a "Register" button.
+Another way to create users is in the UI login page, allowing user self registration through a "Register" button.
 The following entries in the ``$AIRFLOW_HOME/webserver_config.py`` can be edited to make it possible:
 
 .. code-block:: ini
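
For illustration, a hedged sketch of what such a webserver_config.py could
contain to enable database authentication with self-registration (the role
name is an assumption - use one that exists in your deployment):

    from flask_appbuilder.security.manager import AUTH_DB

    # Database-backed authentication (the default in the generated config).
    AUTH_TYPE = AUTH_DB

    # Allow self-registration through the "Register" button on the login page.
    AUTH_USER_REGISTRATION = True
    AUTH_USER_REGISTRATION_ROLE = "Public"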

[airflow] 19/22: More optimized lazy-loading of provider information (#17304)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e1a3acf6acc0eb20470e44d7698146fd34d86001
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 29 12:14:58 2021 +0200

    More optimized lazy-loading of provider information (#17304)
    
    With this change we truly lazy-load hooks and external_links only
    when we need them. Previously they were loaded when any of the
    properties of ProvidersManager was used. With this change, in
    scenarios where only extra links are used, or where we only
    need the list of providers but not the details of which custom
    hooks are available, initialization is much faster. This mainly
    benefits some CLI commands (for example `airflow providers list`
    is much faster now), but also scenarios where, for example,
    .get_conn() is never used in tasks: such tasks may never need to
    import/load the hooks, so they can run faster with a smaller
    memory footprint.
    
    (cherry picked from commit 2dc7aa8e7d5c964076500eac2eaac38507da5841)
---
 airflow/__init__.py          |  6 +++--
 airflow/providers_manager.py | 62 ++++++++++++++++++++++++++++++++++----------
 2 files changed, 52 insertions(+), 16 deletions(-)

diff --git a/airflow/__init__.py b/airflow/__init__.py
index a448491..9f9073e 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -74,11 +74,13 @@ if not settings.LAZY_LOAD_PLUGINS:
 if not settings.LAZY_LOAD_PROVIDERS:
     from airflow import providers_manager
 
-    providers_manager.ProvidersManager().initialize_providers_manager()
+    manager = providers_manager.ProvidersManager()
+    manager.initialize_providers_list()
+    manager.initialize_providers_hooks()
+    manager.initialize_providers_extra_links()
 
 
 # This is never executed, but tricks static analyzers (PyDev, PyCharm,)
-# into knowing the types of these symbols, and what
 # they contain.
 STATICA_HACK = True
 globals()['kcah_acitats'[::-1].upper()] = False
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index 0770f24..5080995 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -22,6 +22,7 @@ import json
 import logging
 import os
 from collections import OrderedDict
+from time import perf_counter
 from typing import Any, Dict, NamedTuple, Set
 
 import jsonschema
@@ -29,6 +30,7 @@ from wtforms import Field
 
 from airflow.utils import yaml
 from airflow.utils.entry_points import entry_points_with_dist
+from airflow.utils.log.logging_mixin import LoggingMixin
 
 try:
     import importlib.resources as importlib_resources
@@ -81,7 +83,7 @@ class ConnectionFormWidgetInfo(NamedTuple):
     field: Field
 
 
-class ProvidersManager:
+class ProvidersManager(LoggingMixin):
     """
     Manages all provider packages. This is a Singleton class. The first time it is
     instantiated, it discovers all available providers in installed packages and
@@ -97,6 +99,7 @@ class ProvidersManager:
         return cls._instance
 
     def __init__(self):
+        """Initializes the manager."""
         # Keeps dict of providers keyed by module name
         self._provider_dict: Dict[str, ProviderInfo] = {}
         # Keeps dict of hooks keyed by connection type
@@ -110,30 +113,61 @@ class ProvidersManager:
         self._customized_form_fields_schema_validator = (
             _create_customized_form_field_behaviours_schema_validator()
         )
-        self._initialized = False
+        self._providers_list_initialized = False
+        self._providers_hooks_initialized = False
+        self._providers_extra_links_initialized = False
 
-    def initialize_providers_manager(self):
-        """Lazy initialization of provider data."""
+    def initialize_providers_list(self):
+        """Lazy initialization of providers list."""
         # We cannot use @cache here because it does not work during pytest, apparently each test
         # runs it it's own namespace and ProvidersManager is a different object in each namespace
-        # even if it is singleton but @cache on the initialize_providers_manager message still works in the
+        # even if it is singleton but @cache on the initialize_providers_*  still works in the
         # way that it is called only once for one of the objects (at least this is how it looks like
         # from running tests)
-        if self._initialized:
+        if self._providers_list_initialized:
             return
+        start_time = perf_counter()
+        self.log.debug("Initializing Providers Manager list")
         # Local source folders are loaded first. They should take precedence over the package ones for
         # Development purpose. In production provider.yaml files are not present in the 'airflow" directory
         # So there is no risk we are going to override package provider accidentally. This can only happen
         # in case of local development
         self._discover_all_airflow_builtin_providers_from_local_sources()
         self._discover_all_providers_from_packages()
-        self._discover_hooks()
         self._provider_dict = OrderedDict(sorted(self._provider_dict.items()))
+        self.log.debug(
+            "Initialization of Providers Manager list took %.2f seconds", perf_counter() - start_time
+        )
+        self._providers_list_initialized = True
+
+    def initialize_providers_hooks(self):
+        """Lazy initialization of providers hooks."""
+        if self._providers_hooks_initialized:
+            return
+        self.initialize_providers_list()
+        start_time = perf_counter()
+        self.log.debug("Initializing Providers Hooks")
+        self._discover_hooks()
         self._hooks_dict = OrderedDict(sorted(self._hooks_dict.items()))
         self._connection_form_widgets = OrderedDict(sorted(self._connection_form_widgets.items()))
         self._field_behaviours = OrderedDict(sorted(self._field_behaviours.items()))
+        self.log.debug(
+            "Initialization of Providers Manager hooks took %.2f seconds", perf_counter() - start_time
+        )
+        self._providers_hooks_initialized = True
+
+    def initialize_providers_extra_links(self):
+        """Lazy initialization of providers extra links."""
+        if self._providers_extra_links_initialized:
+            return
+        self.initialize_providers_list()
+        start_time = perf_counter()
+        self.log.debug("Initializing Providers Extra Links")
         self._discover_extra_links()
-        self._initialized = True
+        self.log.debug(
+            "Initialization of Providers Manager extra links took %.2f seconds", perf_counter() - start_time
+        )
+        self._providers_extra_links_initialized = True
 
     def _discover_all_providers_from_packages(self) -> None:
         """
@@ -385,29 +419,29 @@ class ProvidersManager:
     @property
     def providers(self) -> Dict[str, ProviderInfo]:
         """Returns information about available providers."""
-        self.initialize_providers_manager()
+        self.initialize_providers_list()
         return self._provider_dict
 
     @property
     def hooks(self) -> Dict[str, HookInfo]:
         """Returns dictionary of connection_type-to-hook mapping"""
-        self.initialize_providers_manager()
+        self.initialize_providers_hooks()
         return self._hooks_dict
 
     @property
-    def extra_links_class_names(self):
+    def extra_links_class_names(self) -> Set[str]:
         """Returns set of extra link class names."""
-        self.initialize_providers_manager()
+        self.initialize_providers_extra_links()
         return sorted(self._extra_link_class_name_set)
 
     @property
     def connection_form_widgets(self) -> Dict[str, ConnectionFormWidgetInfo]:
         """Returns widgets for connection forms."""
-        self.initialize_providers_manager()
+        self.initialize_providers_hooks()
         return self._connection_form_widgets
 
     @property
     def field_behaviours(self) -> Dict[str, Dict]:
         """Returns dictionary with field behaviours for connection types."""
-        self.initialize_providers_manager()
+        self.initialize_providers_hooks()
         return self._field_behaviours
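
For illustration, a hedged sketch of how the lazy initialization shown above
plays out for a caller (assuming an Airflow installation with providers
installed):

    from airflow.providers_manager import ProvidersManager

    manager = ProvidersManager()

    # Accessing .providers only triggers initialize_providers_list(); the
    # heavier hook discovery is skipped entirely in this scenario.
    print(len(manager.providers))

    # Hook discovery runs only once hook-related data is actually needed.
    print(list(manager.hooks)[:5])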

[airflow] 06/22: Stop attempting to pull base python image when pulling commit hash (#17231)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit fedce030f331aee85f4d356862715f245882dbb7
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 28 17:00:38 2021 +0200

    Stop attempting to pull base python image when pulling commit hash (#17231)
    
    When we publish the latest images, we also publish the Python base images
    for them, so that we know where the base images are taken from.
    This does not happen when we build "per-build" images - we only
    publish the resulting images, not the base python images, because
    we do not need them. This change was implemented
    after the ghcr.io move and it was not reflected in the --github-id
    switch handling, so the ./breeze command with --github-id specified
    failed while trying to pull a base python image with the same ID.
    
    This PR makes sure that we only pull the base python image when we
    build/pull the latest images, not the per-build ones.
    (cherry picked from commit 430a0f9229e32cadc9e02b48e2130e3cef5ba352)
---
 scripts/ci/libraries/_push_pull_remove_images.sh | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index 630b5cc..0e99b10 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -145,7 +145,10 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
     python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true)
     if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true" || \
             ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
-        push_pull_remove_images::pull_base_python_image
+        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]]; then
+            # Pull base python image when building latest image
+            push_pull_remove_images::pull_base_python_image
+        fi
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
         push_pull_remove_images::pull_image_if_not_present_or_forced \
@@ -160,7 +163,10 @@ function push_pull_remove_images::pull_prod_images_if_needed() {
     python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true)
     if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true"  || \
             ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
-        push_pull_remove_images::pull_base_python_image
+        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]]; then
+            # Pull base python image when building latest image
+            push_pull_remove_images::pull_base_python_image
+        fi
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
         # "Build" segment of production image

[airflow] 08/22: Uses current sources when running k8s tests (#17289)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3c7ea9cad416e8bc2e4b507a86bd18728965455d
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 28 21:36:47 2021 +0200

    Uses current sources when running k8s tests (#17289)
    
    There was a bug in our CI - Kubernetes tests were executed against the
    latest ghcr.io image, rather than the image built from the sources of
    the current PR.
    
    This PR fixes it by correctly using the PR commit as the tag of the
    image used as the base image for kubernetes tests.
    
    (cherry picked from commit 396f30fda9ef0973ec03caeb9da274c30119a9a3)
---
 scripts/ci/libraries/_kind.sh | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index 970a6d3..d4910d9 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -258,15 +258,20 @@ function kind::check_cluster_ready_for_airflow() {
 
 function kind::build_image_for_kubernetes_tests() {
     cd "${AIRFLOW_SOURCES}" || exit 1
+    local image_tag="latest"
+    if [[ -n ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} ]]; then
+        image_tag="${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+    fi
+    echo "Building ${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest from ${AIRFLOW_PROD_IMAGE}:${image_tag}"
     docker_v build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest" . -f - <<EOF
-FROM ${AIRFLOW_PROD_IMAGE}
+FROM ${AIRFLOW_PROD_IMAGE}:${image_tag}
 
 COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/
 
 COPY airflow/kubernetes_executor_templates/ \${AIRFLOW_HOME}/pod_templates/
 
 EOF
-    echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest is prepared for test kubernetes deployment."
+    echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES}:${image_tag} is prepared for test kubernetes deployment."
 }
 
 function kind::load_image_to_kind_cluster() {

[airflow] 07/22: Do not use constraints when preparing venv for k8s tests on CI (#17290)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 63879619d26d8a91765bcc6c95d0b6c322772101
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 28 21:36:29 2021 +0200

    Do not use constraints when preparing venv for k8s tests on CI (#17290)
    
    When the k8s virtualenv is prepared to run k8s tests, we use
    constraints. This, however, might lead to a problem when we increase
    the minimum version of an affected dependency and it conflicts with
    the constraints stored in main.
    
    Therefore, when we run tests in CI (which is indicated by the
    specific pull tag that we use), we do not use constraints for
    installing the kubernetes venv. This should be fine, as we are
    pretty much running it only as a vehicle to run tests.
    
    (cherry picked from commit 96bd4a11689e2dca052ec642010fe19c1cc830a9)
---
 scripts/ci/kubernetes/ci_run_kubernetes_tests.sh | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
index 6cab719..a97f692 100755
--- a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
+++ b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
@@ -89,11 +89,19 @@ function create_virtualenv() {
 
     pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" "wheel==${WHEEL_VERSION}"
 
-    pip install pytest freezegun \
-      --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt"
+    local constraints=(
+        --constraint
+        "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt"
+    )
+    if [[ -n ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} ]]; then
+        # Disable constraints when building in CI with specific version of sources
+        # In case there will be conflicting constraints
+        constraints=()
+    fi
+
+    pip install pytest freezegun "${constraints[@]}"
 
-    pip install -e ".[cncf.kubernetes,postgres]" \
-      --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt"
+    pip install -e ".[cncf.kubernetes,postgres]" "${constraints[@]}"
 }
 
 function run_tests() {